| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.993684210526316, |
| "eval_steps": 500, |
| "global_step": 948, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.003157894736842105, |
| "grad_norm": 6.864107608795166, |
| "learning_rate": 1.0526315789473685e-07, |
| "loss": 1.2166, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.00631578947368421, |
| "grad_norm": 7.149033546447754, |
| "learning_rate": 2.105263157894737e-07, |
| "loss": 1.2292, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.009473684210526316, |
| "grad_norm": 6.731090545654297, |
| "learning_rate": 3.1578947368421055e-07, |
| "loss": 1.1835, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01263157894736842, |
| "grad_norm": 7.016407012939453, |
| "learning_rate": 4.210526315789474e-07, |
| "loss": 1.2222, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.015789473684210527, |
| "grad_norm": 6.585211753845215, |
| "learning_rate": 5.263157894736843e-07, |
| "loss": 1.2031, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.018947368421052633, |
| "grad_norm": 6.720523834228516, |
| "learning_rate": 6.315789473684211e-07, |
| "loss": 1.1972, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.022105263157894735, |
| "grad_norm": 6.992764472961426, |
| "learning_rate": 7.368421052631579e-07, |
| "loss": 1.1867, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.02526315789473684, |
| "grad_norm": 6.5448174476623535, |
| "learning_rate": 8.421052631578948e-07, |
| "loss": 1.2113, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.028421052631578948, |
| "grad_norm": 6.110010147094727, |
| "learning_rate": 9.473684210526317e-07, |
| "loss": 1.1666, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.031578947368421054, |
| "grad_norm": 6.425546169281006, |
| "learning_rate": 1.0526315789473685e-06, |
| "loss": 1.1722, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.034736842105263156, |
| "grad_norm": 4.949359893798828, |
| "learning_rate": 1.1578947368421053e-06, |
| "loss": 1.1464, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.037894736842105266, |
| "grad_norm": 4.857133865356445, |
| "learning_rate": 1.2631578947368422e-06, |
| "loss": 1.1495, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.04105263157894737, |
| "grad_norm": 4.663426876068115, |
| "learning_rate": 1.3684210526315791e-06, |
| "loss": 1.1108, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.04421052631578947, |
| "grad_norm": 2.899270534515381, |
| "learning_rate": 1.4736842105263159e-06, |
| "loss": 1.0779, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.04736842105263158, |
| "grad_norm": 2.799199104309082, |
| "learning_rate": 1.5789473684210526e-06, |
| "loss": 1.0596, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.05052631578947368, |
| "grad_norm": 2.7462809085845947, |
| "learning_rate": 1.6842105263157895e-06, |
| "loss": 1.0846, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.05368421052631579, |
| "grad_norm": 2.6440353393554688, |
| "learning_rate": 1.7894736842105265e-06, |
| "loss": 1.0408, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.056842105263157895, |
| "grad_norm": 2.496100664138794, |
| "learning_rate": 1.8947368421052634e-06, |
| "loss": 1.0447, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 2.5369954109191895, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 1.0338, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.06315789473684211, |
| "grad_norm": 3.36142635345459, |
| "learning_rate": 2.105263157894737e-06, |
| "loss": 1.0461, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.06631578947368422, |
| "grad_norm": 3.2484495639801025, |
| "learning_rate": 2.2105263157894738e-06, |
| "loss": 1.0202, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.06947368421052631, |
| "grad_norm": 3.0597786903381348, |
| "learning_rate": 2.3157894736842105e-06, |
| "loss": 1.0088, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.07263157894736842, |
| "grad_norm": 2.594919204711914, |
| "learning_rate": 2.4210526315789477e-06, |
| "loss": 0.9878, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.07578947368421053, |
| "grad_norm": 2.3219525814056396, |
| "learning_rate": 2.5263157894736844e-06, |
| "loss": 1.0149, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.07894736842105263, |
| "grad_norm": 1.812867283821106, |
| "learning_rate": 2.631578947368421e-06, |
| "loss": 0.9818, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.08210526315789474, |
| "grad_norm": 1.581230640411377, |
| "learning_rate": 2.7368421052631583e-06, |
| "loss": 0.9757, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.08526315789473685, |
| "grad_norm": 1.8027225732803345, |
| "learning_rate": 2.842105263157895e-06, |
| "loss": 0.9676, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.08842105263157894, |
| "grad_norm": 1.7598600387573242, |
| "learning_rate": 2.9473684210526317e-06, |
| "loss": 0.9181, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.09157894736842105, |
| "grad_norm": 1.7967835664749146, |
| "learning_rate": 3.052631578947369e-06, |
| "loss": 0.9336, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.09473684210526316, |
| "grad_norm": 1.3898029327392578, |
| "learning_rate": 3.157894736842105e-06, |
| "loss": 0.9156, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.09789473684210526, |
| "grad_norm": 1.2304682731628418, |
| "learning_rate": 3.2631578947368423e-06, |
| "loss": 0.936, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.10105263157894737, |
| "grad_norm": 1.1240628957748413, |
| "learning_rate": 3.368421052631579e-06, |
| "loss": 0.8984, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.10421052631578948, |
| "grad_norm": 1.1114623546600342, |
| "learning_rate": 3.473684210526316e-06, |
| "loss": 0.9032, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.10736842105263159, |
| "grad_norm": 1.2093194723129272, |
| "learning_rate": 3.578947368421053e-06, |
| "loss": 0.9466, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.11052631578947368, |
| "grad_norm": 1.1614065170288086, |
| "learning_rate": 3.6842105263157896e-06, |
| "loss": 0.8888, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.11368421052631579, |
| "grad_norm": 0.9620938897132874, |
| "learning_rate": 3.789473684210527e-06, |
| "loss": 0.8985, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.1168421052631579, |
| "grad_norm": 0.8165697455406189, |
| "learning_rate": 3.894736842105263e-06, |
| "loss": 0.8922, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 0.9624667763710022, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.9059, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.1231578947368421, |
| "grad_norm": 0.9526475667953491, |
| "learning_rate": 4.105263157894737e-06, |
| "loss": 0.8697, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.12631578947368421, |
| "grad_norm": 0.9052897691726685, |
| "learning_rate": 4.210526315789474e-06, |
| "loss": 0.8752, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.12947368421052632, |
| "grad_norm": 0.8724302053451538, |
| "learning_rate": 4.315789473684211e-06, |
| "loss": 0.8745, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.13263157894736843, |
| "grad_norm": 0.8744531273841858, |
| "learning_rate": 4.4210526315789476e-06, |
| "loss": 0.833, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.13578947368421052, |
| "grad_norm": 0.8191508650779724, |
| "learning_rate": 4.526315789473685e-06, |
| "loss": 0.8681, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.13894736842105262, |
| "grad_norm": 0.8925843238830566, |
| "learning_rate": 4.631578947368421e-06, |
| "loss": 0.8522, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.14210526315789473, |
| "grad_norm": 0.89957195520401, |
| "learning_rate": 4.736842105263158e-06, |
| "loss": 0.8732, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.14526315789473684, |
| "grad_norm": 0.8851773738861084, |
| "learning_rate": 4.842105263157895e-06, |
| "loss": 0.8991, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.14842105263157895, |
| "grad_norm": 0.7817597985267639, |
| "learning_rate": 4.947368421052632e-06, |
| "loss": 0.8711, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.15157894736842106, |
| "grad_norm": 0.7551989555358887, |
| "learning_rate": 5.052631578947369e-06, |
| "loss": 0.8306, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.15473684210526314, |
| "grad_norm": 0.8163503408432007, |
| "learning_rate": 5.157894736842106e-06, |
| "loss": 0.8403, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.15789473684210525, |
| "grad_norm": 0.7936963438987732, |
| "learning_rate": 5.263157894736842e-06, |
| "loss": 0.837, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.16105263157894736, |
| "grad_norm": 0.7440241575241089, |
| "learning_rate": 5.36842105263158e-06, |
| "loss": 0.8538, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.16421052631578947, |
| "grad_norm": 0.7333534359931946, |
| "learning_rate": 5.4736842105263165e-06, |
| "loss": 0.819, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.16736842105263158, |
| "grad_norm": 0.6583252549171448, |
| "learning_rate": 5.578947368421052e-06, |
| "loss": 0.7879, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.1705263157894737, |
| "grad_norm": 0.684806764125824, |
| "learning_rate": 5.68421052631579e-06, |
| "loss": 0.8325, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.1736842105263158, |
| "grad_norm": 0.7598180174827576, |
| "learning_rate": 5.789473684210527e-06, |
| "loss": 0.8341, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.17684210526315788, |
| "grad_norm": 0.8507996201515198, |
| "learning_rate": 5.8947368421052634e-06, |
| "loss": 0.8428, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 0.7600204348564148, |
| "learning_rate": 6e-06, |
| "loss": 0.818, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.1831578947368421, |
| "grad_norm": 0.6492787599563599, |
| "learning_rate": 6.105263157894738e-06, |
| "loss": 0.8346, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.1863157894736842, |
| "grad_norm": 0.6897916793823242, |
| "learning_rate": 6.2105263157894745e-06, |
| "loss": 0.8409, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.18947368421052632, |
| "grad_norm": 0.7259018421173096, |
| "learning_rate": 6.31578947368421e-06, |
| "loss": 0.8285, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.19263157894736843, |
| "grad_norm": 0.8188095688819885, |
| "learning_rate": 6.421052631578948e-06, |
| "loss": 0.8816, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.1957894736842105, |
| "grad_norm": 0.6784875988960266, |
| "learning_rate": 6.526315789473685e-06, |
| "loss": 0.8153, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.19894736842105262, |
| "grad_norm": 0.740426242351532, |
| "learning_rate": 6.631578947368421e-06, |
| "loss": 0.8243, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.20210526315789473, |
| "grad_norm": 0.7158690094947815, |
| "learning_rate": 6.736842105263158e-06, |
| "loss": 0.828, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.20526315789473684, |
| "grad_norm": 0.7803067564964294, |
| "learning_rate": 6.842105263157896e-06, |
| "loss": 0.7897, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.20842105263157895, |
| "grad_norm": 0.6472960710525513, |
| "learning_rate": 6.947368421052632e-06, |
| "loss": 0.7908, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.21157894736842106, |
| "grad_norm": 0.7025761008262634, |
| "learning_rate": 7.052631578947369e-06, |
| "loss": 0.8408, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.21473684210526317, |
| "grad_norm": 0.7511818408966064, |
| "learning_rate": 7.157894736842106e-06, |
| "loss": 0.7913, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.21789473684210525, |
| "grad_norm": 0.6920287609100342, |
| "learning_rate": 7.263157894736843e-06, |
| "loss": 0.8173, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.22105263157894736, |
| "grad_norm": 0.6897710561752319, |
| "learning_rate": 7.368421052631579e-06, |
| "loss": 0.8213, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.22421052631578947, |
| "grad_norm": 0.7388243675231934, |
| "learning_rate": 7.473684210526316e-06, |
| "loss": 0.8039, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.22736842105263158, |
| "grad_norm": 0.8480104804039001, |
| "learning_rate": 7.578947368421054e-06, |
| "loss": 0.8235, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.2305263157894737, |
| "grad_norm": 0.7924239635467529, |
| "learning_rate": 7.68421052631579e-06, |
| "loss": 0.8256, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.2336842105263158, |
| "grad_norm": 0.7806857228279114, |
| "learning_rate": 7.789473684210526e-06, |
| "loss": 0.8284, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.23684210526315788, |
| "grad_norm": 0.7032557129859924, |
| "learning_rate": 7.894736842105265e-06, |
| "loss": 0.7895, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 0.8388243913650513, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 0.8032, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.2431578947368421, |
| "grad_norm": 0.8523675203323364, |
| "learning_rate": 8.105263157894736e-06, |
| "loss": 0.8118, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.2463157894736842, |
| "grad_norm": 0.7454504370689392, |
| "learning_rate": 8.210526315789475e-06, |
| "loss": 0.7955, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.24947368421052632, |
| "grad_norm": 0.737902820110321, |
| "learning_rate": 8.315789473684212e-06, |
| "loss": 0.8337, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.25263157894736843, |
| "grad_norm": 0.7588112950325012, |
| "learning_rate": 8.421052631578948e-06, |
| "loss": 0.7851, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.2557894736842105, |
| "grad_norm": 0.812930166721344, |
| "learning_rate": 8.526315789473685e-06, |
| "loss": 0.8024, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.25894736842105265, |
| "grad_norm": 0.9560610055923462, |
| "learning_rate": 8.631578947368422e-06, |
| "loss": 0.8093, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.26210526315789473, |
| "grad_norm": 0.9282992482185364, |
| "learning_rate": 8.736842105263158e-06, |
| "loss": 0.8169, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.26526315789473687, |
| "grad_norm": 0.7830009460449219, |
| "learning_rate": 8.842105263157895e-06, |
| "loss": 0.8062, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.26842105263157895, |
| "grad_norm": 0.8576416969299316, |
| "learning_rate": 8.947368421052632e-06, |
| "loss": 0.8023, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.27157894736842103, |
| "grad_norm": 0.8227952122688293, |
| "learning_rate": 9.05263157894737e-06, |
| "loss": 0.805, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.27473684210526317, |
| "grad_norm": 0.8914945721626282, |
| "learning_rate": 9.157894736842105e-06, |
| "loss": 0.8269, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.27789473684210525, |
| "grad_norm": 0.81199711561203, |
| "learning_rate": 9.263157894736842e-06, |
| "loss": 0.8056, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.2810526315789474, |
| "grad_norm": 1.0146549940109253, |
| "learning_rate": 9.36842105263158e-06, |
| "loss": 0.7905, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.28421052631578947, |
| "grad_norm": 0.8971161246299744, |
| "learning_rate": 9.473684210526315e-06, |
| "loss": 0.8079, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.2873684210526316, |
| "grad_norm": 0.9439178705215454, |
| "learning_rate": 9.578947368421054e-06, |
| "loss": 0.8126, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.2905263157894737, |
| "grad_norm": 0.9185954332351685, |
| "learning_rate": 9.68421052631579e-06, |
| "loss": 0.7945, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.29368421052631577, |
| "grad_norm": 0.7818227410316467, |
| "learning_rate": 9.789473684210527e-06, |
| "loss": 0.7825, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.2968421052631579, |
| "grad_norm": 0.7514786124229431, |
| "learning_rate": 9.894736842105264e-06, |
| "loss": 0.8049, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 1.0437406301498413, |
| "learning_rate": 1e-05, |
| "loss": 0.8204, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.3031578947368421, |
| "grad_norm": 0.921838104724884, |
| "learning_rate": 9.999966088952842e-06, |
| "loss": 0.8107, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.3063157894736842, |
| "grad_norm": 0.7265084981918335, |
| "learning_rate": 9.99986435627135e-06, |
| "loss": 0.8067, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.3094736842105263, |
| "grad_norm": 1.0714375972747803, |
| "learning_rate": 9.999694803335468e-06, |
| "loss": 0.8052, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.3126315789473684, |
| "grad_norm": 0.9621460437774658, |
| "learning_rate": 9.999457432445087e-06, |
| "loss": 0.8122, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.3157894736842105, |
| "grad_norm": 0.7225667834281921, |
| "learning_rate": 9.999152246820001e-06, |
| "loss": 0.8104, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.31894736842105265, |
| "grad_norm": 0.8660763502120972, |
| "learning_rate": 9.998779250599877e-06, |
| "loss": 0.8005, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.32210526315789473, |
| "grad_norm": 0.951799213886261, |
| "learning_rate": 9.998338448844193e-06, |
| "loss": 0.8022, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.32526315789473687, |
| "grad_norm": 0.8598304986953735, |
| "learning_rate": 9.997829847532165e-06, |
| "loss": 0.7862, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.32842105263157895, |
| "grad_norm": 0.7481747269630432, |
| "learning_rate": 9.99725345356268e-06, |
| "loss": 0.7933, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.33157894736842103, |
| "grad_norm": 0.9335681200027466, |
| "learning_rate": 9.996609274754183e-06, |
| "loss": 0.7822, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.33473684210526317, |
| "grad_norm": 0.8390291929244995, |
| "learning_rate": 9.995897319844588e-06, |
| "loss": 0.7838, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.33789473684210525, |
| "grad_norm": 0.7386859655380249, |
| "learning_rate": 9.995117598491146e-06, |
| "loss": 0.7872, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.3410526315789474, |
| "grad_norm": 0.8380564451217651, |
| "learning_rate": 9.994270121270327e-06, |
| "loss": 0.7568, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.34421052631578947, |
| "grad_norm": 0.7734573483467102, |
| "learning_rate": 9.993354899677665e-06, |
| "loss": 0.8004, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.3473684210526316, |
| "grad_norm": 0.8333052396774292, |
| "learning_rate": 9.99237194612761e-06, |
| "loss": 0.7686, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.3505263157894737, |
| "grad_norm": 0.9416193962097168, |
| "learning_rate": 9.991321273953357e-06, |
| "loss": 0.7973, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.35368421052631577, |
| "grad_norm": 0.7098072171211243, |
| "learning_rate": 9.99020289740666e-06, |
| "loss": 0.7547, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.3568421052631579, |
| "grad_norm": 0.9404763579368591, |
| "learning_rate": 9.989016831657652e-06, |
| "loss": 0.8093, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 1.0210902690887451, |
| "learning_rate": 9.987763092794621e-06, |
| "loss": 0.7939, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.3631578947368421, |
| "grad_norm": 0.7902900576591492, |
| "learning_rate": 9.986441697823808e-06, |
| "loss": 0.7889, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.3663157894736842, |
| "grad_norm": 0.9824138283729553, |
| "learning_rate": 9.985052664669168e-06, |
| "loss": 0.7888, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.36947368421052634, |
| "grad_norm": 0.9166287183761597, |
| "learning_rate": 9.983596012172127e-06, |
| "loss": 0.7845, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.3726315789473684, |
| "grad_norm": 0.7522174715995789, |
| "learning_rate": 9.982071760091334e-06, |
| "loss": 0.7619, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.3757894736842105, |
| "grad_norm": 0.9811515212059021, |
| "learning_rate": 9.980479929102377e-06, |
| "loss": 0.7943, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.37894736842105264, |
| "grad_norm": 0.7924212217330933, |
| "learning_rate": 9.978820540797521e-06, |
| "loss": 0.7928, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.3821052631578947, |
| "grad_norm": 0.8440699577331543, |
| "learning_rate": 9.977093617685404e-06, |
| "loss": 0.8005, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.38526315789473686, |
| "grad_norm": 0.7170959115028381, |
| "learning_rate": 9.975299183190734e-06, |
| "loss": 0.7844, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.38842105263157894, |
| "grad_norm": 0.9932653307914734, |
| "learning_rate": 9.973437261653973e-06, |
| "loss": 0.7932, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.391578947368421, |
| "grad_norm": 0.7866117358207703, |
| "learning_rate": 9.971507878331005e-06, |
| "loss": 0.7672, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.39473684210526316, |
| "grad_norm": 0.791227400302887, |
| "learning_rate": 9.96951105939279e-06, |
| "loss": 0.7678, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.39789473684210525, |
| "grad_norm": 1.0364530086517334, |
| "learning_rate": 9.96744683192502e-06, |
| "loss": 0.7814, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.4010526315789474, |
| "grad_norm": 0.6801534295082092, |
| "learning_rate": 9.96531522392774e-06, |
| "loss": 0.751, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.40421052631578946, |
| "grad_norm": 1.0359455347061157, |
| "learning_rate": 9.963116264314974e-06, |
| "loss": 0.7701, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.4073684210526316, |
| "grad_norm": 0.8113856315612793, |
| "learning_rate": 9.960849982914332e-06, |
| "loss": 0.7924, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.4105263157894737, |
| "grad_norm": 0.7722939848899841, |
| "learning_rate": 9.958516410466601e-06, |
| "loss": 0.7891, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.41368421052631577, |
| "grad_norm": 0.8695185780525208, |
| "learning_rate": 9.956115578625339e-06, |
| "loss": 0.7795, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.4168421052631579, |
| "grad_norm": 0.6542952060699463, |
| "learning_rate": 9.953647519956432e-06, |
| "loss": 0.7518, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 0.7686191201210022, |
| "learning_rate": 9.951112267937663e-06, |
| "loss": 0.7851, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.4231578947368421, |
| "grad_norm": 0.7790025472640991, |
| "learning_rate": 9.948509856958253e-06, |
| "loss": 0.7453, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.4263157894736842, |
| "grad_norm": 0.7110816240310669, |
| "learning_rate": 9.945840322318391e-06, |
| "loss": 0.8121, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.42947368421052634, |
| "grad_norm": 0.7633756995201111, |
| "learning_rate": 9.943103700228768e-06, |
| "loss": 0.7982, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.4326315789473684, |
| "grad_norm": 0.7634323239326477, |
| "learning_rate": 9.940300027810067e-06, |
| "loss": 0.8097, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.4357894736842105, |
| "grad_norm": 0.7589544057846069, |
| "learning_rate": 9.93742934309248e-06, |
| "loss": 0.7697, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.43894736842105264, |
| "grad_norm": 0.820608377456665, |
| "learning_rate": 9.934491685015173e-06, |
| "loss": 0.7917, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.4421052631578947, |
| "grad_norm": 0.801396906375885, |
| "learning_rate": 9.931487093425775e-06, |
| "loss": 0.7908, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.44526315789473686, |
| "grad_norm": 0.9706858992576599, |
| "learning_rate": 9.928415609079821e-06, |
| "loss": 0.7753, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.44842105263157894, |
| "grad_norm": 0.818867027759552, |
| "learning_rate": 9.925277273640211e-06, |
| "loss": 0.7884, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.4515789473684211, |
| "grad_norm": 0.9297605156898499, |
| "learning_rate": 9.922072129676644e-06, |
| "loss": 0.8113, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.45473684210526316, |
| "grad_norm": 0.9068427681922913, |
| "learning_rate": 9.918800220665035e-06, |
| "loss": 0.808, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.45789473684210524, |
| "grad_norm": 0.7996309399604797, |
| "learning_rate": 9.915461590986926e-06, |
| "loss": 0.7967, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.4610526315789474, |
| "grad_norm": 0.777631938457489, |
| "learning_rate": 9.912056285928891e-06, |
| "loss": 0.8123, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.46421052631578946, |
| "grad_norm": 0.7640287280082703, |
| "learning_rate": 9.908584351681911e-06, |
| "loss": 0.7942, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.4673684210526316, |
| "grad_norm": 0.900375247001648, |
| "learning_rate": 9.90504583534076e-06, |
| "loss": 0.7916, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.4705263157894737, |
| "grad_norm": 0.8545629382133484, |
| "learning_rate": 9.901440784903354e-06, |
| "loss": 0.7946, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.47368421052631576, |
| "grad_norm": 0.739754319190979, |
| "learning_rate": 9.897769249270106e-06, |
| "loss": 0.797, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.4768421052631579, |
| "grad_norm": 0.814207136631012, |
| "learning_rate": 9.894031278243266e-06, |
| "loss": 0.7636, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 0.8900481462478638, |
| "learning_rate": 9.890226922526238e-06, |
| "loss": 0.7918, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.4831578947368421, |
| "grad_norm": 0.7793235182762146, |
| "learning_rate": 9.886356233722894e-06, |
| "loss": 0.7956, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.4863157894736842, |
| "grad_norm": 0.890493631362915, |
| "learning_rate": 9.88241926433688e-06, |
| "loss": 0.7643, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.48947368421052634, |
| "grad_norm": 0.8794576525688171, |
| "learning_rate": 9.878416067770898e-06, |
| "loss": 0.7929, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.4926315789473684, |
| "grad_norm": 0.7571336627006531, |
| "learning_rate": 9.874346698325983e-06, |
| "loss": 0.7852, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.4957894736842105, |
| "grad_norm": 0.8208326101303101, |
| "learning_rate": 9.870211211200766e-06, |
| "loss": 0.8021, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.49894736842105264, |
| "grad_norm": 0.904834508895874, |
| "learning_rate": 9.866009662490727e-06, |
| "loss": 0.7809, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.5021052631578947, |
| "grad_norm": 0.8771081566810608, |
| "learning_rate": 9.861742109187433e-06, |
| "loss": 0.8044, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.5052631578947369, |
| "grad_norm": 0.7162814140319824, |
| "learning_rate": 9.857408609177763e-06, |
| "loss": 0.7935, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.508421052631579, |
| "grad_norm": 0.8736246824264526, |
| "learning_rate": 9.853009221243129e-06, |
| "loss": 0.781, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.511578947368421, |
| "grad_norm": 0.7425425052642822, |
| "learning_rate": 9.848544005058668e-06, |
| "loss": 0.7619, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.5147368421052632, |
| "grad_norm": 0.7853212356567383, |
| "learning_rate": 9.844013021192447e-06, |
| "loss": 0.77, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.5178947368421053, |
| "grad_norm": 0.7985186576843262, |
| "learning_rate": 9.839416331104625e-06, |
| "loss": 0.7823, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.5210526315789473, |
| "grad_norm": 0.8637293577194214, |
| "learning_rate": 9.834753997146633e-06, |
| "loss": 0.7598, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.5242105263157895, |
| "grad_norm": 0.7811316847801208, |
| "learning_rate": 9.830026082560324e-06, |
| "loss": 0.7787, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.5273684210526316, |
| "grad_norm": 0.795226514339447, |
| "learning_rate": 9.825232651477109e-06, |
| "loss": 0.7843, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.5305263157894737, |
| "grad_norm": 0.9816672205924988, |
| "learning_rate": 9.820373768917095e-06, |
| "loss": 0.778, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.5336842105263158, |
| "grad_norm": 0.7347379922866821, |
| "learning_rate": 9.815449500788203e-06, |
| "loss": 0.8002, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.5368421052631579, |
| "grad_norm": 0.8775848746299744, |
| "learning_rate": 9.810459913885265e-06, |
| "loss": 0.7484, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 0.8044248819351196, |
| "learning_rate": 9.805405075889129e-06, |
| "loss": 0.7607, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.5431578947368421, |
| "grad_norm": 0.8173661231994629, |
| "learning_rate": 9.800285055365737e-06, |
| "loss": 0.7654, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.5463157894736842, |
| "grad_norm": 0.7608327269554138, |
| "learning_rate": 9.79509992176519e-06, |
| "loss": 0.7716, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.5494736842105263, |
| "grad_norm": 0.7798534631729126, |
| "learning_rate": 9.789849745420811e-06, |
| "loss": 0.7906, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.5526315789473685, |
| "grad_norm": 0.7842231392860413, |
| "learning_rate": 9.784534597548194e-06, |
| "loss": 0.7752, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.5557894736842105, |
| "grad_norm": 0.7228990793228149, |
| "learning_rate": 9.779154550244228e-06, |
| "loss": 0.7626, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.5589473684210526, |
| "grad_norm": 0.8362434506416321, |
| "learning_rate": 9.77370967648613e-06, |
| "loss": 0.7547, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.5621052631578948, |
| "grad_norm": 0.7536957859992981, |
| "learning_rate": 9.768200050130446e-06, |
| "loss": 0.7658, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.5652631578947368, |
| "grad_norm": 0.7978958487510681, |
| "learning_rate": 9.76262574591206e-06, |
| "loss": 0.7753, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.5684210526315789, |
| "grad_norm": 0.7004671692848206, |
| "learning_rate": 9.756986839443166e-06, |
| "loss": 0.8162, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.5715789473684211, |
| "grad_norm": 0.7977942228317261, |
| "learning_rate": 9.751283407212253e-06, |
| "loss": 0.7756, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.5747368421052632, |
| "grad_norm": 0.7080273628234863, |
| "learning_rate": 9.745515526583066e-06, |
| "loss": 0.7631, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.5778947368421052, |
| "grad_norm": 0.8024187088012695, |
| "learning_rate": 9.739683275793554e-06, |
| "loss": 0.7908, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.5810526315789474, |
| "grad_norm": 0.7169841527938843, |
| "learning_rate": 9.73378673395481e-06, |
| "loss": 0.7602, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.5842105263157895, |
| "grad_norm": 0.7052045464515686, |
| "learning_rate": 9.727825981049994e-06, |
| "loss": 0.7795, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.5873684210526315, |
| "grad_norm": 0.7155394554138184, |
| "learning_rate": 9.72180109793326e-06, |
| "loss": 0.7919, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.5905263157894737, |
| "grad_norm": 0.7038570046424866, |
| "learning_rate": 9.715712166328643e-06, |
| "loss": 0.799, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.5936842105263158, |
| "grad_norm": 0.6392240524291992, |
| "learning_rate": 9.709559268828963e-06, |
| "loss": 0.7868, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.5968421052631578, |
| "grad_norm": 0.748368501663208, |
| "learning_rate": 9.703342488894699e-06, |
| "loss": 0.7572, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 0.6144984364509583, |
| "learning_rate": 9.697061910852857e-06, |
| "loss": 0.7579, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.6031578947368421, |
| "grad_norm": 0.7086066007614136, |
| "learning_rate": 9.690717619895828e-06, |
| "loss": 0.7773, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.6063157894736843, |
| "grad_norm": 0.7267181277275085, |
| "learning_rate": 9.684309702080234e-06, |
| "loss": 0.8062, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.6094736842105263, |
| "grad_norm": 0.6966003179550171, |
| "learning_rate": 9.677838244325754e-06, |
| "loss": 0.7542, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.6126315789473684, |
| "grad_norm": 0.7743439674377441, |
| "learning_rate": 9.671303334413952e-06, |
| "loss": 0.7607, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.6157894736842106, |
| "grad_norm": 0.698222279548645, |
| "learning_rate": 9.664705060987085e-06, |
| "loss": 0.7181, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.6189473684210526, |
| "grad_norm": 0.738317608833313, |
| "learning_rate": 9.658043513546898e-06, |
| "loss": 0.8108, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.6221052631578947, |
| "grad_norm": 0.7287928462028503, |
| "learning_rate": 9.651318782453407e-06, |
| "loss": 0.7593, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.6252631578947369, |
| "grad_norm": 0.7296643853187561, |
| "learning_rate": 9.644530958923683e-06, |
| "loss": 0.7683, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.628421052631579, |
| "grad_norm": 0.6896747946739197, |
| "learning_rate": 9.637680135030609e-06, |
| "loss": 0.7851, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.631578947368421, |
| "grad_norm": 0.722059428691864, |
| "learning_rate": 9.63076640370163e-06, |
| "loss": 0.7837, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.6347368421052632, |
| "grad_norm": 0.7174692749977112, |
| "learning_rate": 9.623789858717491e-06, |
| "loss": 0.7959, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.6378947368421053, |
| "grad_norm": 0.769080400466919, |
| "learning_rate": 9.616750594710972e-06, |
| "loss": 0.7711, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.6410526315789473, |
| "grad_norm": 0.857413113117218, |
| "learning_rate": 9.6096487071656e-06, |
| "loss": 0.7967, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.6442105263157895, |
| "grad_norm": 0.6896303296089172, |
| "learning_rate": 9.602484292414348e-06, |
| "loss": 0.7541, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.6473684210526316, |
| "grad_norm": 0.7579444050788879, |
| "learning_rate": 9.595257447638344e-06, |
| "loss": 0.7721, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.6505263157894737, |
| "grad_norm": 0.8411123752593994, |
| "learning_rate": 9.587968270865534e-06, |
| "loss": 0.8087, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.6536842105263158, |
| "grad_norm": 0.769237220287323, |
| "learning_rate": 9.580616860969365e-06, |
| "loss": 0.7511, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.6568421052631579, |
| "grad_norm": 0.6318850517272949, |
| "learning_rate": 9.573203317667442e-06, |
| "loss": 0.7552, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 0.7551825046539307, |
| "learning_rate": 9.56572774152017e-06, |
| "loss": 0.7795, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.6631578947368421, |
| "grad_norm": 0.8102651834487915, |
| "learning_rate": 9.558190233929396e-06, |
| "loss": 0.7973, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.6663157894736842, |
| "grad_norm": 0.7229571342468262, |
| "learning_rate": 9.55059089713703e-06, |
| "loss": 0.7906, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.6694736842105263, |
| "grad_norm": 0.6960732936859131, |
| "learning_rate": 9.54292983422366e-06, |
| "loss": 0.755, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.6726315789473685, |
| "grad_norm": 0.7564675211906433, |
| "learning_rate": 9.53520714910715e-06, |
| "loss": 0.7993, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.6757894736842105, |
| "grad_norm": 0.7037273645401001, |
| "learning_rate": 9.527422946541238e-06, |
| "loss": 0.7898, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.6789473684210526, |
| "grad_norm": 0.735377311706543, |
| "learning_rate": 9.519577332114107e-06, |
| "loss": 0.7599, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.6821052631578948, |
| "grad_norm": 0.6795523762702942, |
| "learning_rate": 9.511670412246956e-06, |
| "loss": 0.7779, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.6852631578947368, |
| "grad_norm": 0.6760138273239136, |
| "learning_rate": 9.503702294192563e-06, |
| "loss": 0.7759, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.6884210526315789, |
| "grad_norm": 0.6858091354370117, |
| "learning_rate": 9.495673086033813e-06, |
| "loss": 0.7508, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.6915789473684211, |
| "grad_norm": 0.8286890983581543, |
| "learning_rate": 9.487582896682252e-06, |
| "loss": 0.7454, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.6947368421052632, |
| "grad_norm": 0.8973384499549866, |
| "learning_rate": 9.479431835876596e-06, |
| "loss": 0.7616, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.6978947368421052, |
| "grad_norm": 0.7554185390472412, |
| "learning_rate": 9.471220014181247e-06, |
| "loss": 0.8202, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.7010526315789474, |
| "grad_norm": 0.7141987681388855, |
| "learning_rate": 9.462947542984795e-06, |
| "loss": 0.7863, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.7042105263157895, |
| "grad_norm": 0.9621738195419312, |
| "learning_rate": 9.454614534498506e-06, |
| "loss": 0.7748, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.7073684210526315, |
| "grad_norm": 0.8891428709030151, |
| "learning_rate": 9.446221101754795e-06, |
| "loss": 0.7568, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.7105263157894737, |
| "grad_norm": 0.8448644876480103, |
| "learning_rate": 9.4377673586057e-06, |
| "loss": 0.7561, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.7136842105263158, |
| "grad_norm": 0.744163453578949, |
| "learning_rate": 9.429253419721335e-06, |
| "loss": 0.8085, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.716842105263158, |
| "grad_norm": 0.7832540273666382, |
| "learning_rate": 9.420679400588334e-06, |
| "loss": 0.7592, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.8211809992790222, |
| "learning_rate": 9.412045417508281e-06, |
| "loss": 0.7546, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.7231578947368421, |
| "grad_norm": 0.7208123803138733, |
| "learning_rate": 9.40335158759614e-06, |
| "loss": 0.7848, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.7263157894736842, |
| "grad_norm": 0.770294725894928, |
| "learning_rate": 9.394598028778664e-06, |
| "loss": 0.7693, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.7294736842105263, |
| "grad_norm": 0.7772082686424255, |
| "learning_rate": 9.385784859792787e-06, |
| "loss": 0.7752, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.7326315789473684, |
| "grad_norm": 0.7851942181587219, |
| "learning_rate": 9.376912200184029e-06, |
| "loss": 0.7858, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.7357894736842105, |
| "grad_norm": 0.9354181885719299, |
| "learning_rate": 9.367980170304857e-06, |
| "loss": 0.7474, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.7389473684210527, |
| "grad_norm": 0.7733820676803589, |
| "learning_rate": 9.35898889131307e-06, |
| "loss": 0.7727, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.7421052631578947, |
| "grad_norm": 0.746245265007019, |
| "learning_rate": 9.349938485170139e-06, |
| "loss": 0.7459, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.7452631578947368, |
| "grad_norm": 0.8601150512695312, |
| "learning_rate": 9.340829074639566e-06, |
| "loss": 0.768, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.748421052631579, |
| "grad_norm": 0.7379437685012817, |
| "learning_rate": 9.331660783285208e-06, |
| "loss": 0.7667, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.751578947368421, |
| "grad_norm": 0.7364124059677124, |
| "learning_rate": 9.322433735469614e-06, |
| "loss": 0.799, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.7547368421052632, |
| "grad_norm": 0.7081237435340881, |
| "learning_rate": 9.313148056352321e-06, |
| "loss": 0.7464, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.7578947368421053, |
| "grad_norm": 0.7287322878837585, |
| "learning_rate": 9.303803871888172e-06, |
| "loss": 0.7915, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.7610526315789473, |
| "grad_norm": 0.7040133476257324, |
| "learning_rate": 9.2944013088256e-06, |
| "loss": 0.7631, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.7642105263157895, |
| "grad_norm": 0.7509044408798218, |
| "learning_rate": 9.284940494704906e-06, |
| "loss": 0.7836, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.7673684210526316, |
| "grad_norm": 0.7864567041397095, |
| "learning_rate": 9.275421557856536e-06, |
| "loss": 0.7547, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.7705263157894737, |
| "grad_norm": 0.696180522441864, |
| "learning_rate": 9.26584462739934e-06, |
| "loss": 0.7908, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.7736842105263158, |
| "grad_norm": 0.7591786980628967, |
| "learning_rate": 9.25620983323881e-06, |
| "loss": 0.7663, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.7768421052631579, |
| "grad_norm": 0.6878987550735474, |
| "learning_rate": 9.246517306065332e-06, |
| "loss": 0.7363, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.7305520176887512, |
| "learning_rate": 9.236767177352403e-06, |
| "loss": 0.7649, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.783157894736842, |
| "grad_norm": 0.7379259467124939, |
| "learning_rate": 9.226959579354855e-06, |
| "loss": 0.7733, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.7863157894736842, |
| "grad_norm": 0.7051509618759155, |
| "learning_rate": 9.217094645107052e-06, |
| "loss": 0.771, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.7894736842105263, |
| "grad_norm": 0.7959958910942078, |
| "learning_rate": 9.207172508421099e-06, |
| "loss": 0.7529, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.7926315789473685, |
| "grad_norm": 0.6911975145339966, |
| "learning_rate": 9.197193303885008e-06, |
| "loss": 0.7722, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.7957894736842105, |
| "grad_norm": 0.8246911764144897, |
| "learning_rate": 9.187157166860894e-06, |
| "loss": 0.768, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.7989473684210526, |
| "grad_norm": 0.7509826421737671, |
| "learning_rate": 9.177064233483121e-06, |
| "loss": 0.7869, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.8021052631578948, |
| "grad_norm": 0.7473223209381104, |
| "learning_rate": 9.166914640656467e-06, |
| "loss": 0.7515, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.8052631578947368, |
| "grad_norm": 0.8410593867301941, |
| "learning_rate": 9.156708526054257e-06, |
| "loss": 0.8106, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.8084210526315789, |
| "grad_norm": 0.7072636485099792, |
| "learning_rate": 9.146446028116508e-06, |
| "loss": 0.7475, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.8115789473684211, |
| "grad_norm": 0.7030969858169556, |
| "learning_rate": 9.136127286048038e-06, |
| "loss": 0.7567, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.8147368421052632, |
| "grad_norm": 0.8154286742210388, |
| "learning_rate": 9.125752439816588e-06, |
| "loss": 0.7884, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.8178947368421052, |
| "grad_norm": 0.6962621212005615, |
| "learning_rate": 9.115321630150918e-06, |
| "loss": 0.7688, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.8210526315789474, |
| "grad_norm": 0.7069746255874634, |
| "learning_rate": 9.104834998538899e-06, |
| "loss": 0.7313, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.8242105263157895, |
| "grad_norm": 0.7636252045631409, |
| "learning_rate": 9.094292687225594e-06, |
| "loss": 0.7617, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.8273684210526315, |
| "grad_norm": 0.7339772582054138, |
| "learning_rate": 9.08369483921133e-06, |
| "loss": 0.7469, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.8305263157894737, |
| "grad_norm": 0.71903395652771, |
| "learning_rate": 9.073041598249757e-06, |
| "loss": 0.7662, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.8336842105263158, |
| "grad_norm": 0.7993423938751221, |
| "learning_rate": 9.062333108845897e-06, |
| "loss": 0.7514, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.8368421052631579, |
| "grad_norm": 0.8029423356056213, |
| "learning_rate": 9.051569516254186e-06, |
| "loss": 0.7457, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.6745309233665466, |
| "learning_rate": 9.040750966476502e-06, |
| "loss": 0.7591, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.8431578947368421, |
| "grad_norm": 0.8080049753189087, |
| "learning_rate": 9.029877606260187e-06, |
| "loss": 0.7712, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.8463157894736842, |
| "grad_norm": 0.6188151836395264, |
| "learning_rate": 9.018949583096051e-06, |
| "loss": 0.7339, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.8494736842105263, |
| "grad_norm": 0.7027499079704285, |
| "learning_rate": 9.00796704521638e-06, |
| "loss": 0.7651, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.8526315789473684, |
| "grad_norm": 0.6698938608169556, |
| "learning_rate": 8.996930141592915e-06, |
| "loss": 0.7648, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.8557894736842105, |
| "grad_norm": 0.610205352306366, |
| "learning_rate": 8.985839021934843e-06, |
| "loss": 0.7663, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.8589473684210527, |
| "grad_norm": 0.6673887372016907, |
| "learning_rate": 8.974693836686755e-06, |
| "loss": 0.7512, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.8621052631578947, |
| "grad_norm": 0.6559451818466187, |
| "learning_rate": 8.963494737026612e-06, |
| "loss": 0.7553, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.8652631578947368, |
| "grad_norm": 0.679578959941864, |
| "learning_rate": 8.952241874863695e-06, |
| "loss": 0.7456, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.868421052631579, |
| "grad_norm": 0.6281573176383972, |
| "learning_rate": 8.940935402836535e-06, |
| "loss": 0.7251, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.871578947368421, |
| "grad_norm": 0.6603413820266724, |
| "learning_rate": 8.92957547431086e-06, |
| "loss": 0.7518, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.8747368421052631, |
| "grad_norm": 0.759972870349884, |
| "learning_rate": 8.918162243377494e-06, |
| "loss": 0.755, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.8778947368421053, |
| "grad_norm": 0.7080827951431274, |
| "learning_rate": 8.906695864850284e-06, |
| "loss": 0.7868, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.8810526315789474, |
| "grad_norm": 0.6595768928527832, |
| "learning_rate": 8.895176494263993e-06, |
| "loss": 0.7536, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.8842105263157894, |
| "grad_norm": 0.6742393374443054, |
| "learning_rate": 8.883604287872186e-06, |
| "loss": 0.7771, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.8873684210526316, |
| "grad_norm": 0.7637141942977905, |
| "learning_rate": 8.871979402645116e-06, |
| "loss": 0.7723, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.8905263157894737, |
| "grad_norm": 0.6951268315315247, |
| "learning_rate": 8.860301996267601e-06, |
| "loss": 0.7668, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.8936842105263157, |
| "grad_norm": 0.672383189201355, |
| "learning_rate": 8.848572227136869e-06, |
| "loss": 0.759, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.8968421052631579, |
| "grad_norm": 0.8001788854598999, |
| "learning_rate": 8.83679025436042e-06, |
| "loss": 0.7903, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.7648778557777405, |
| "learning_rate": 8.824956237753872e-06, |
| "loss": 0.7746, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.9031578947368422, |
| "grad_norm": 0.6406739354133606, |
| "learning_rate": 8.813070337838781e-06, |
| "loss": 0.7771, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.9063157894736842, |
| "grad_norm": 0.6822999715805054, |
| "learning_rate": 8.80113271584047e-06, |
| "loss": 0.7592, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.9094736842105263, |
| "grad_norm": 0.7665005326271057, |
| "learning_rate": 8.789143533685847e-06, |
| "loss": 0.747, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.9126315789473685, |
| "grad_norm": 0.7119230628013611, |
| "learning_rate": 8.777102954001199e-06, |
| "loss": 0.7686, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.9157894736842105, |
| "grad_norm": 0.6693398356437683, |
| "learning_rate": 8.765011140109993e-06, |
| "loss": 0.7496, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.9189473684210526, |
| "grad_norm": 0.6795002818107605, |
| "learning_rate": 8.752868256030658e-06, |
| "loss": 0.7543, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.9221052631578948, |
| "grad_norm": 0.773586094379425, |
| "learning_rate": 8.740674466474357e-06, |
| "loss": 0.7822, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.9252631578947368, |
| "grad_norm": 0.7866982817649841, |
| "learning_rate": 8.728429936842762e-06, |
| "loss": 0.7792, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.9284210526315789, |
| "grad_norm": 0.6205121874809265, |
| "learning_rate": 8.716134833225803e-06, |
| "loss": 0.7541, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.9315789473684211, |
| "grad_norm": 0.819891095161438, |
| "learning_rate": 8.70378932239941e-06, |
| "loss": 0.7477, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.9347368421052632, |
| "grad_norm": 0.7897220253944397, |
| "learning_rate": 8.691393571823266e-06, |
| "loss": 0.7401, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.9378947368421052, |
| "grad_norm": 0.772665798664093, |
| "learning_rate": 8.678947749638525e-06, |
| "loss": 0.7888, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.9410526315789474, |
| "grad_norm": 0.7343323826789856, |
| "learning_rate": 8.666452024665533e-06, |
| "loss": 0.7321, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.9442105263157895, |
| "grad_norm": 0.9545820355415344, |
| "learning_rate": 8.653906566401533e-06, |
| "loss": 0.761, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.9473684210526315, |
| "grad_norm": 0.8397694230079651, |
| "learning_rate": 8.64131154501838e-06, |
| "loss": 0.7766, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.9505263157894737, |
| "grad_norm": 0.7189294695854187, |
| "learning_rate": 8.628667131360218e-06, |
| "loss": 0.7279, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.9536842105263158, |
| "grad_norm": 0.7687074542045593, |
| "learning_rate": 8.61597349694117e-06, |
| "loss": 0.8155, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.9568421052631579, |
| "grad_norm": 0.7857553362846375, |
| "learning_rate": 8.60323081394301e-06, |
| "loss": 0.7422, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.7681856751441956, |
| "learning_rate": 8.59043925521283e-06, |
| "loss": 0.7551, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.9631578947368421, |
| "grad_norm": 0.7847979068756104, |
| "learning_rate": 8.577598994260687e-06, |
| "loss": 0.7978, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.9663157894736842, |
| "grad_norm": 0.7032335996627808, |
| "learning_rate": 8.56471020525726e-06, |
| "loss": 0.7596, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.9694736842105263, |
| "grad_norm": 0.8031393885612488, |
| "learning_rate": 8.551773063031484e-06, |
| "loss": 0.7615, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.9726315789473684, |
| "grad_norm": 0.9480533599853516, |
| "learning_rate": 8.538787743068172e-06, |
| "loss": 0.7728, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.9757894736842105, |
| "grad_norm": 0.7876819968223572, |
| "learning_rate": 8.525754421505646e-06, |
| "loss": 0.7871, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.9789473684210527, |
| "grad_norm": 0.8743879199028015, |
| "learning_rate": 8.512673275133334e-06, |
| "loss": 0.7953, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.9821052631578947, |
| "grad_norm": 0.8548572063446045, |
| "learning_rate": 8.49954448138939e-06, |
| "loss": 0.751, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.9852631578947368, |
| "grad_norm": 0.801235020160675, |
| "learning_rate": 8.486368218358268e-06, |
| "loss": 0.7325, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.988421052631579, |
| "grad_norm": 0.6868360042572021, |
| "learning_rate": 8.473144664768322e-06, |
| "loss": 0.7565, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.991578947368421, |
| "grad_norm": 0.6946709156036377, |
| "learning_rate": 8.459873999989367e-06, |
| "loss": 0.7436, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.9947368421052631, |
| "grad_norm": 0.7966560125350952, |
| "learning_rate": 8.446556404030263e-06, |
| "loss": 0.7818, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.9978947368421053, |
| "grad_norm": 0.7011910080909729, |
| "learning_rate": 8.433192057536458e-06, |
| "loss": 0.8047, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.0010526315789474, |
| "grad_norm": 0.776292085647583, |
| "learning_rate": 8.419781141787549e-06, |
| "loss": 0.7393, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.0042105263157894, |
| "grad_norm": 0.7284736037254333, |
| "learning_rate": 8.406323838694808e-06, |
| "loss": 0.6665, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.0073684210526315, |
| "grad_norm": 0.6233306527137756, |
| "learning_rate": 8.392820330798734e-06, |
| "loss": 0.6677, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.0105263157894737, |
| "grad_norm": 0.6907612085342407, |
| "learning_rate": 8.379270801266569e-06, |
| "loss": 0.6921, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.0136842105263157, |
| "grad_norm": 0.7600920796394348, |
| "learning_rate": 8.3656754338898e-06, |
| "loss": 0.7129, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.016842105263158, |
| "grad_norm": 0.7222225069999695, |
| "learning_rate": 8.352034413081687e-06, |
| "loss": 0.6633, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.02, |
| "grad_norm": 0.8816447854042053, |
| "learning_rate": 8.33834792387475e-06, |
| "loss": 0.7188, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.023157894736842, |
| "grad_norm": 0.7416877150535583, |
| "learning_rate": 8.324616151918263e-06, |
| "loss": 0.7279, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.0263157894736843, |
| "grad_norm": 0.7744479775428772, |
| "learning_rate": 8.31083928347573e-06, |
| "loss": 0.6765, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.0294736842105263, |
| "grad_norm": 0.7218484282493591, |
| "learning_rate": 8.297017505422366e-06, |
| "loss": 0.6872, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.0326315789473683, |
| "grad_norm": 0.6902859807014465, |
| "learning_rate": 8.28315100524256e-06, |
| "loss": 0.6696, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.0357894736842106, |
| "grad_norm": 0.6810570359230042, |
| "learning_rate": 8.269239971027328e-06, |
| "loss": 0.692, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.0389473684210526, |
| "grad_norm": 0.7604600191116333, |
| "learning_rate": 8.255284591471762e-06, |
| "loss": 0.7042, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.0421052631578946, |
| "grad_norm": 0.6768532991409302, |
| "learning_rate": 8.241285055872478e-06, |
| "loss": 0.7111, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.045263157894737, |
| "grad_norm": 0.7930629849433899, |
| "learning_rate": 8.227241554125041e-06, |
| "loss": 0.7196, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.048421052631579, |
| "grad_norm": 0.7775673270225525, |
| "learning_rate": 8.213154276721388e-06, |
| "loss": 0.6706, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.051578947368421, |
| "grad_norm": 0.7330057621002197, |
| "learning_rate": 8.199023414747257e-06, |
| "loss": 0.6841, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.0547368421052632, |
| "grad_norm": 0.6644675731658936, |
| "learning_rate": 8.18484915987957e-06, |
| "loss": 0.6813, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.0578947368421052, |
| "grad_norm": 0.7848517894744873, |
| "learning_rate": 8.170631704383865e-06, |
| "loss": 0.6907, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.0610526315789475, |
| "grad_norm": 0.6873379349708557, |
| "learning_rate": 8.15637124111166e-06, |
| "loss": 0.7008, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.0642105263157895, |
| "grad_norm": 0.6563950777053833, |
| "learning_rate": 8.14206796349785e-06, |
| "loss": 0.7082, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.0673684210526315, |
| "grad_norm": 0.6331363916397095, |
| "learning_rate": 8.127722065558087e-06, |
| "loss": 0.6702, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.0705263157894738, |
| "grad_norm": 0.7190977931022644, |
| "learning_rate": 8.113333741886137e-06, |
| "loss": 0.6687, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.0736842105263158, |
| "grad_norm": 0.6932594180107117, |
| "learning_rate": 8.098903187651252e-06, |
| "loss": 0.6677, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.0768421052631578, |
| "grad_norm": 0.7676414251327515, |
| "learning_rate": 8.084430598595514e-06, |
| "loss": 0.6945, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.08, |
| "grad_norm": 0.7621206045150757, |
| "learning_rate": 8.069916171031181e-06, |
| "loss": 0.6908, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.083157894736842, |
| "grad_norm": 0.7197576761245728, |
| "learning_rate": 8.055360101838026e-06, |
| "loss": 0.7144, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.0863157894736841, |
| "grad_norm": 0.7100708484649658, |
| "learning_rate": 8.04076258846067e-06, |
| "loss": 0.6983, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.0894736842105264, |
| "grad_norm": 0.6768266558647156, |
| "learning_rate": 8.026123828905902e-06, |
| "loss": 0.6769, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.0926315789473684, |
| "grad_norm": 0.7214046716690063, |
| "learning_rate": 8.011444021739986e-06, |
| "loss": 0.7022, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.0957894736842104, |
| "grad_norm": 0.7675743699073792, |
| "learning_rate": 7.996723366085978e-06, |
| "loss": 0.6718, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.0989473684210527, |
| "grad_norm": 0.7916297912597656, |
| "learning_rate": 7.981962061621012e-06, |
| "loss": 0.6947, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.1021052631578947, |
| "grad_norm": 0.7070953845977783, |
| "learning_rate": 7.967160308573607e-06, |
| "loss": 0.6223, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.1052631578947367, |
| "grad_norm": 0.8087365031242371, |
| "learning_rate": 7.952318307720943e-06, |
| "loss": 0.724, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.108421052631579, |
| "grad_norm": 0.6387585401535034, |
| "learning_rate": 7.937436260386134e-06, |
| "loss": 0.6732, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.111578947368421, |
| "grad_norm": 0.7180246114730835, |
| "learning_rate": 7.922514368435506e-06, |
| "loss": 0.669, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.1147368421052632, |
| "grad_norm": 0.7500141859054565, |
| "learning_rate": 7.907552834275847e-06, |
| "loss": 0.6946, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.1178947368421053, |
| "grad_norm": 0.6470015048980713, |
| "learning_rate": 7.892551860851679e-06, |
| "loss": 0.6826, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.1210526315789473, |
| "grad_norm": 0.668079674243927, |
| "learning_rate": 7.877511651642486e-06, |
| "loss": 0.6672, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.1242105263157895, |
| "grad_norm": 0.7336689829826355, |
| "learning_rate": 7.862432410659964e-06, |
| "loss": 0.6932, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.1273684210526316, |
| "grad_norm": 0.6308228969573975, |
| "learning_rate": 7.847314342445258e-06, |
| "loss": 0.7253, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.1305263157894736, |
| "grad_norm": 0.6617457866668701, |
| "learning_rate": 7.832157652066173e-06, |
| "loss": 0.7053, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.1336842105263158, |
| "grad_norm": 0.7797347903251648, |
| "learning_rate": 7.81696254511441e-06, |
| "loss": 0.6927, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.1368421052631579, |
| "grad_norm": 0.6679685711860657, |
| "learning_rate": 7.80172922770276e-06, |
| "loss": 0.6917, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.1400000000000001, |
| "grad_norm": 0.7145143151283264, |
| "learning_rate": 7.786457906462329e-06, |
| "loss": 0.6967, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.1431578947368422, |
| "grad_norm": 0.8625593185424805, |
| "learning_rate": 7.771148788539704e-06, |
| "loss": 0.6728, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.1463157894736842, |
| "grad_norm": 0.7024162411689758, |
| "learning_rate": 7.755802081594179e-06, |
| "loss": 0.7053, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.1494736842105264, |
| "grad_norm": 0.7329344153404236, |
| "learning_rate": 7.740417993794918e-06, |
| "loss": 0.644, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.1526315789473685, |
| "grad_norm": 0.7487715482711792, |
| "learning_rate": 7.724996733818124e-06, |
| "loss": 0.6838, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.1557894736842105, |
| "grad_norm": 0.7509961128234863, |
| "learning_rate": 7.709538510844234e-06, |
| "loss": 0.6527, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.1589473684210527, |
| "grad_norm": 0.7864775657653809, |
| "learning_rate": 7.694043534555055e-06, |
| "loss": 0.7205, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.1621052631578948, |
| "grad_norm": 0.821261465549469, |
| "learning_rate": 7.678512015130936e-06, |
| "loss": 0.6832, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.1652631578947368, |
| "grad_norm": 0.7063290476799011, |
| "learning_rate": 7.662944163247916e-06, |
| "loss": 0.6964, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.168421052631579, |
| "grad_norm": 0.7851418256759644, |
| "learning_rate": 7.647340190074854e-06, |
| "loss": 0.7028, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.171578947368421, |
| "grad_norm": 0.747036337852478, |
| "learning_rate": 7.63170030727058e-06, |
| "loss": 0.637, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.174736842105263, |
| "grad_norm": 0.6792373657226562, |
| "learning_rate": 7.616024726981015e-06, |
| "loss": 0.6903, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.1778947368421053, |
| "grad_norm": 0.805324912071228, |
| "learning_rate": 7.600313661836298e-06, |
| "loss": 0.6826, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.1810526315789474, |
| "grad_norm": 0.6382725238800049, |
| "learning_rate": 7.584567324947893e-06, |
| "loss": 0.6337, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.1842105263157894, |
| "grad_norm": 0.606036365032196, |
| "learning_rate": 7.568785929905713e-06, |
| "loss": 0.7219, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.1873684210526316, |
| "grad_norm": 0.6815652847290039, |
| "learning_rate": 7.552969690775209e-06, |
| "loss": 0.6729, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.1905263157894737, |
| "grad_norm": 0.6756043434143066, |
| "learning_rate": 7.537118822094474e-06, |
| "loss": 0.7, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.1936842105263157, |
| "grad_norm": 0.6447840929031372, |
| "learning_rate": 7.521233538871329e-06, |
| "loss": 0.6982, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.196842105263158, |
| "grad_norm": 0.5919721126556396, |
| "learning_rate": 7.505314056580411e-06, |
| "loss": 0.6923, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.2, |
| "grad_norm": 0.6429916620254517, |
| "learning_rate": 7.489360591160245e-06, |
| "loss": 0.6523, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.203157894736842, |
| "grad_norm": 0.7389147281646729, |
| "learning_rate": 7.4733733590103185e-06, |
| "loss": 0.6774, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.2063157894736842, |
| "grad_norm": 0.6661043763160706, |
| "learning_rate": 7.457352576988144e-06, |
| "loss": 0.7086, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.2094736842105263, |
| "grad_norm": 0.6406580209732056, |
| "learning_rate": 7.441298462406321e-06, |
| "loss": 0.7328, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.2126315789473685, |
| "grad_norm": 0.613018810749054, |
| "learning_rate": 7.4252112330295835e-06, |
| "loss": 0.7078, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.2157894736842105, |
| "grad_norm": 0.696648359298706, |
| "learning_rate": 7.409091107071849e-06, |
| "loss": 0.7006, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.2189473684210526, |
| "grad_norm": 0.6505605578422546, |
| "learning_rate": 7.392938303193257e-06, |
| "loss": 0.6805, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.2221052631578948, |
| "grad_norm": 0.6389300227165222, |
| "learning_rate": 7.376753040497207e-06, |
| "loss": 0.6447, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.2252631578947368, |
| "grad_norm": 0.6349870562553406, |
| "learning_rate": 7.3605355385273805e-06, |
| "loss": 0.6565, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.2284210526315789, |
| "grad_norm": 0.6041010618209839, |
| "learning_rate": 7.344286017264765e-06, |
| "loss": 0.708, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.231578947368421, |
| "grad_norm": 0.6843451261520386, |
| "learning_rate": 7.3280046971246786e-06, |
| "loss": 0.7028, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.2347368421052631, |
| "grad_norm": 0.6678407788276672, |
| "learning_rate": 7.311691798953765e-06, |
| "loss": 0.702, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.2378947368421054, |
| "grad_norm": 0.6551707983016968, |
| "learning_rate": 7.295347544027006e-06, |
| "loss": 0.6968, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.2410526315789474, |
| "grad_norm": 0.6763453483581543, |
| "learning_rate": 7.278972154044722e-06, |
| "loss": 0.7002, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.2442105263157894, |
| "grad_norm": 0.6329587697982788, |
| "learning_rate": 7.2625658511295635e-06, |
| "loss": 0.6731, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.2473684210526317, |
| "grad_norm": 0.6213243007659912, |
| "learning_rate": 7.2461288578234955e-06, |
| "loss": 0.6926, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.2505263157894737, |
| "grad_norm": 0.6473403573036194, |
| "learning_rate": 7.229661397084775e-06, |
| "loss": 0.684, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.2536842105263157, |
| "grad_norm": 0.6111834049224854, |
| "learning_rate": 7.213163692284943e-06, |
| "loss": 0.7092, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.256842105263158, |
| "grad_norm": 0.693008303642273, |
| "learning_rate": 7.196635967205776e-06, |
| "loss": 0.7175, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.26, |
| "grad_norm": 0.6312058568000793, |
| "learning_rate": 7.180078446036259e-06, |
| "loss": 0.6499, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.263157894736842, |
| "grad_norm": 0.6444021463394165, |
| "learning_rate": 7.163491353369545e-06, |
| "loss": 0.6972, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.2663157894736843, |
| "grad_norm": 0.6179962158203125, |
| "learning_rate": 7.146874914199906e-06, |
| "loss": 0.7037, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.2694736842105263, |
| "grad_norm": 0.6444630026817322, |
| "learning_rate": 7.130229353919685e-06, |
| "loss": 0.6928, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.2726315789473683, |
| "grad_norm": 0.666993260383606, |
| "learning_rate": 7.113554898316231e-06, |
| "loss": 0.6872, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.2757894736842106, |
| "grad_norm": 0.6936898231506348, |
| "learning_rate": 7.0968517735688445e-06, |
| "loss": 0.6788, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.2789473684210526, |
| "grad_norm": 0.5810768604278564, |
| "learning_rate": 7.080120206245709e-06, |
| "loss": 0.7012, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.2821052631578946, |
| "grad_norm": 0.6620011925697327, |
| "learning_rate": 7.063360423300808e-06, |
| "loss": 0.6811, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.2852631578947369, |
| "grad_norm": 0.6566019058227539, |
| "learning_rate": 7.04657265207086e-06, |
| "loss": 0.6951, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.288421052631579, |
| "grad_norm": 0.6208463311195374, |
| "learning_rate": 7.029757120272222e-06, |
| "loss": 0.7054, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.291578947368421, |
| "grad_norm": 0.6561064124107361, |
| "learning_rate": 7.0129140559978184e-06, |
| "loss": 0.6849, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.2947368421052632, |
| "grad_norm": 0.6156150698661804, |
| "learning_rate": 6.99604368771402e-06, |
| "loss": 0.6664, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.2978947368421052, |
| "grad_norm": 0.7128709554672241, |
| "learning_rate": 6.979146244257573e-06, |
| "loss": 0.6856, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.3010526315789472, |
| "grad_norm": 0.7587366700172424, |
| "learning_rate": 6.962221954832476e-06, |
| "loss": 0.6407, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.3042105263157895, |
| "grad_norm": 0.7253438234329224, |
| "learning_rate": 6.945271049006882e-06, |
| "loss": 0.6383, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.3073684210526315, |
| "grad_norm": 0.7263516187667847, |
| "learning_rate": 6.928293756709976e-06, |
| "loss": 0.7091, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.3105263157894738, |
| "grad_norm": 0.7259902358055115, |
| "learning_rate": 6.911290308228861e-06, |
| "loss": 0.658, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.3136842105263158, |
| "grad_norm": 0.6816960573196411, |
| "learning_rate": 6.894260934205437e-06, |
| "loss": 0.6276, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.3168421052631578, |
| "grad_norm": 0.6776388883590698, |
| "learning_rate": 6.8772058656332626e-06, |
| "loss": 0.7156, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.32, |
| "grad_norm": 0.658682644367218, |
| "learning_rate": 6.860125333854437e-06, |
| "loss": 0.6832, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.323157894736842, |
| "grad_norm": 0.6834143400192261, |
| "learning_rate": 6.843019570556443e-06, |
| "loss": 0.7103, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.3263157894736843, |
| "grad_norm": 0.7089201211929321, |
| "learning_rate": 6.82588880776902e-06, |
| "loss": 0.7263, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.3294736842105264, |
| "grad_norm": 0.6268842220306396, |
| "learning_rate": 6.8087332778610116e-06, |
| "loss": 0.6906, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.3326315789473684, |
| "grad_norm": 0.6012834906578064, |
| "learning_rate": 6.791553213537209e-06, |
| "loss": 0.7038, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.3357894736842106, |
| "grad_norm": 0.6678085923194885, |
| "learning_rate": 6.774348847835203e-06, |
| "loss": 0.7062, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.3389473684210527, |
| "grad_norm": 0.7389436364173889, |
| "learning_rate": 6.757120414122214e-06, |
| "loss": 0.6959, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.3421052631578947, |
| "grad_norm": 0.6289021968841553, |
| "learning_rate": 6.739868146091934e-06, |
| "loss": 0.7158, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.345263157894737, |
| "grad_norm": 0.6698178052902222, |
| "learning_rate": 6.722592277761355e-06, |
| "loss": 0.6744, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.348421052631579, |
| "grad_norm": 0.7639443874359131, |
| "learning_rate": 6.705293043467589e-06, |
| "loss": 0.6726, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.351578947368421, |
| "grad_norm": 0.6818212270736694, |
| "learning_rate": 6.687970677864696e-06, |
| "loss": 0.7182, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.3547368421052632, |
| "grad_norm": 0.6343548893928528, |
| "learning_rate": 6.6706254159205e-06, |
| "loss": 0.6823, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.3578947368421053, |
| "grad_norm": 0.6729276776313782, |
| "learning_rate": 6.653257492913398e-06, |
| "loss": 0.6729, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.3610526315789473, |
| "grad_norm": 0.6306588053703308, |
| "learning_rate": 6.6358671444291735e-06, |
| "loss": 0.6753, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.3642105263157895, |
| "grad_norm": 0.5845309495925903, |
| "learning_rate": 6.618454606357796e-06, |
| "loss": 0.6878, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.3673684210526316, |
| "grad_norm": 0.7026935815811157, |
| "learning_rate": 6.601020114890227e-06, |
| "loss": 0.691, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.3705263157894736, |
| "grad_norm": 0.7399420738220215, |
| "learning_rate": 6.5835639065152104e-06, |
| "loss": 0.7315, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.3736842105263158, |
| "grad_norm": 0.6496444344520569, |
| "learning_rate": 6.56608621801607e-06, |
| "loss": 0.7043, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.3768421052631579, |
| "grad_norm": 0.6120364665985107, |
| "learning_rate": 6.548587286467491e-06, |
| "loss": 0.6685, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.38, |
| "grad_norm": 0.6417269706726074, |
| "learning_rate": 6.531067349232314e-06, |
| "loss": 0.6831, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.3831578947368421, |
| "grad_norm": 0.8120979070663452, |
| "learning_rate": 6.5135266439583015e-06, |
| "loss": 0.6841, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.3863157894736842, |
| "grad_norm": 0.7373508214950562, |
| "learning_rate": 6.495965408574929e-06, |
| "loss": 0.6976, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.3894736842105262, |
| "grad_norm": 0.6089206337928772, |
| "learning_rate": 6.478383881290152e-06, |
| "loss": 0.6892, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.3926315789473684, |
| "grad_norm": 0.7098264098167419, |
| "learning_rate": 6.460782300587166e-06, |
| "loss": 0.7105, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.3957894736842105, |
| "grad_norm": 0.7377170920372009, |
| "learning_rate": 6.443160905221188e-06, |
| "loss": 0.6848, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.3989473684210525, |
| "grad_norm": 0.6914153099060059, |
| "learning_rate": 6.425519934216204e-06, |
| "loss": 0.7005, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.4021052631578947, |
| "grad_norm": 0.6577550172805786, |
| "learning_rate": 6.407859626861734e-06, |
| "loss": 0.6872, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.4052631578947368, |
| "grad_norm": 0.7401857972145081, |
| "learning_rate": 6.390180222709583e-06, |
| "loss": 0.6642, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.408421052631579, |
| "grad_norm": 0.653255820274353, |
| "learning_rate": 6.372481961570597e-06, |
| "loss": 0.6641, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.411578947368421, |
| "grad_norm": 0.5870925784111023, |
| "learning_rate": 6.3547650835114014e-06, |
| "loss": 0.6783, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.4147368421052633, |
| "grad_norm": 0.7446110248565674, |
| "learning_rate": 6.337029828851151e-06, |
| "loss": 0.6965, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.4178947368421053, |
| "grad_norm": 0.7110916972160339, |
| "learning_rate": 6.319276438158271e-06, |
| "loss": 0.6985, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.4210526315789473, |
| "grad_norm": 0.6280571222305298, |
| "learning_rate": 6.301505152247185e-06, |
| "loss": 0.6855, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.4242105263157896, |
| "grad_norm": 0.6359561681747437, |
| "learning_rate": 6.283716212175062e-06, |
| "loss": 0.6676, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.4273684210526316, |
| "grad_norm": 0.670707106590271, |
| "learning_rate": 6.265909859238536e-06, |
| "loss": 0.6898, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.4305263157894736, |
| "grad_norm": 0.6176190376281738, |
| "learning_rate": 6.248086334970435e-06, |
| "loss": 0.6481, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.433684210526316, |
| "grad_norm": 0.6055241823196411, |
| "learning_rate": 6.230245881136509e-06, |
| "loss": 0.6382, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.436842105263158, |
| "grad_norm": 0.6523144245147705, |
| "learning_rate": 6.2123887397321456e-06, |
| "loss": 0.7032, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.44, |
| "grad_norm": 0.6355512142181396, |
| "learning_rate": 6.194515152979093e-06, |
| "loss": 0.6795, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.4431578947368422, |
| "grad_norm": 0.5738744735717773, |
| "learning_rate": 6.176625363322164e-06, |
| "loss": 0.7088, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.4463157894736842, |
| "grad_norm": 0.5846467018127441, |
| "learning_rate": 6.158719613425964e-06, |
| "loss": 0.7205, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.4494736842105262, |
| "grad_norm": 0.6887890696525574, |
| "learning_rate": 6.140798146171581e-06, |
| "loss": 0.7071, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.4526315789473685, |
| "grad_norm": 0.6898921132087708, |
| "learning_rate": 6.122861204653304e-06, |
| "loss": 0.6667, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.4557894736842105, |
| "grad_norm": 0.6479648351669312, |
| "learning_rate": 6.104909032175323e-06, |
| "loss": 0.6884, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.4589473684210525, |
| "grad_norm": 0.7151727676391602, |
| "learning_rate": 6.086941872248424e-06, |
| "loss": 0.6796, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.4621052631578948, |
| "grad_norm": 0.649958074092865, |
| "learning_rate": 6.068959968586689e-06, |
| "loss": 0.7084, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.4652631578947368, |
| "grad_norm": 0.590797483921051, |
| "learning_rate": 6.050963565104191e-06, |
| "loss": 0.6626, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.4684210526315788, |
| "grad_norm": 0.6719523072242737, |
| "learning_rate": 6.032952905911686e-06, |
| "loss": 0.6946, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.471578947368421, |
| "grad_norm": 0.6793899536132812, |
| "learning_rate": 6.014928235313301e-06, |
| "loss": 0.6863, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.4747368421052631, |
| "grad_norm": 0.6513259410858154, |
| "learning_rate": 5.996889797803214e-06, |
| "loss": 0.7068, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.4778947368421052, |
| "grad_norm": 0.5904105305671692, |
| "learning_rate": 5.978837838062348e-06, |
| "loss": 0.6878, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.4810526315789474, |
| "grad_norm": 0.6193195581436157, |
| "learning_rate": 5.9607726009550494e-06, |
| "loss": 0.6824, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.4842105263157894, |
| "grad_norm": 0.6615768074989319, |
| "learning_rate": 5.942694331525758e-06, |
| "loss": 0.6957, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.4873684210526315, |
| "grad_norm": 0.5977884531021118, |
| "learning_rate": 5.924603274995693e-06, |
| "loss": 0.6732, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.4905263157894737, |
| "grad_norm": 0.6094414591789246, |
| "learning_rate": 5.906499676759524e-06, |
| "loss": 0.6896, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.4936842105263157, |
| "grad_norm": 0.5905715227127075, |
| "learning_rate": 5.88838378238204e-06, |
| "loss": 0.6964, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.496842105263158, |
| "grad_norm": 0.6121339797973633, |
| "learning_rate": 5.8702558375948206e-06, |
| "loss": 0.6704, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.5825003385543823, |
| "learning_rate": 5.852116088292901e-06, |
| "loss": 0.6957, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.5031578947368422, |
| "grad_norm": 0.6180065870285034, |
| "learning_rate": 5.8339647805314404e-06, |
| "loss": 0.7134, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.506315789473684, |
| "grad_norm": 0.6621575355529785, |
| "learning_rate": 5.815802160522379e-06, |
| "loss": 0.6579, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.5094736842105263, |
| "grad_norm": 0.6555248498916626, |
| "learning_rate": 5.797628474631102e-06, |
| "loss": 0.6424, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.5126315789473685, |
| "grad_norm": 0.6568667888641357, |
| "learning_rate": 5.7794439693730975e-06, |
| "loss": 0.6965, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.5157894736842106, |
| "grad_norm": 0.6549687385559082, |
| "learning_rate": 5.761248891410613e-06, |
| "loss": 0.7017, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.5189473684210526, |
| "grad_norm": 0.6165953874588013, |
| "learning_rate": 5.743043487549306e-06, |
| "loss": 0.719, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.5221052631578948, |
| "grad_norm": 0.6918132901191711, |
| "learning_rate": 5.7248280047348995e-06, |
| "loss": 0.6964, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.5252631578947369, |
| "grad_norm": 0.6982928514480591, |
| "learning_rate": 5.706602690049832e-06, |
| "loss": 0.6933, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.528421052631579, |
| "grad_norm": 0.6259163618087769, |
| "learning_rate": 5.688367790709909e-06, |
| "loss": 0.7202, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.5315789473684212, |
| "grad_norm": 0.7041128873825073, |
| "learning_rate": 5.6701235540609405e-06, |
| "loss": 0.7115, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.5347368421052632, |
| "grad_norm": 0.6209255456924438, |
| "learning_rate": 5.651870227575391e-06, |
| "loss": 0.6945, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.5378947368421052, |
| "grad_norm": 0.6310893893241882, |
| "learning_rate": 5.633608058849033e-06, |
| "loss": 0.6985, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.5410526315789475, |
| "grad_norm": 0.6265409588813782, |
| "learning_rate": 5.61533729559757e-06, |
| "loss": 0.7472, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.5442105263157895, |
| "grad_norm": 0.621372640132904, |
| "learning_rate": 5.5970581856532864e-06, |
| "loss": 0.6869, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.5473684210526315, |
| "grad_norm": 0.6584547758102417, |
| "learning_rate": 5.578770976961685e-06, |
| "loss": 0.6797, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.5505263157894738, |
| "grad_norm": 0.6084906458854675, |
| "learning_rate": 5.560475917578129e-06, |
| "loss": 0.6897, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.5536842105263158, |
| "grad_norm": 0.6554166674613953, |
| "learning_rate": 5.542173255664463e-06, |
| "loss": 0.6844, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.5568421052631578, |
| "grad_norm": 0.5814923048019409, |
| "learning_rate": 5.523863239485661e-06, |
| "loss": 0.7098, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.56, |
| "grad_norm": 0.6252988576889038, |
| "learning_rate": 5.505546117406449e-06, |
| "loss": 0.7227, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.563157894736842, |
| "grad_norm": 0.6618260145187378, |
| "learning_rate": 5.487222137887949e-06, |
| "loss": 0.6874, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.566315789473684, |
| "grad_norm": 0.6781257390975952, |
| "learning_rate": 5.4688915494842886e-06, |
| "loss": 0.6582, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.5694736842105264, |
| "grad_norm": 0.7620246410369873, |
| "learning_rate": 5.450554600839251e-06, |
| "loss": 0.6982, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.5726315789473684, |
| "grad_norm": 0.646678626537323, |
| "learning_rate": 5.432211540682887e-06, |
| "loss": 0.6742, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.5757894736842104, |
| "grad_norm": 0.7081868648529053, |
| "learning_rate": 5.413862617828147e-06, |
| "loss": 0.656, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.5789473684210527, |
| "grad_norm": 0.6851574182510376, |
| "learning_rate": 5.395508081167506e-06, |
| "loss": 0.6885, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.582105263157895, |
| "grad_norm": 0.679471492767334, |
| "learning_rate": 5.37714817966959e-06, |
| "loss": 0.6661, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.5852631578947367, |
| "grad_norm": 0.6246220469474792, |
| "learning_rate": 5.35878316237579e-06, |
| "loss": 0.6632, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.588421052631579, |
| "grad_norm": 0.6367597579956055, |
| "learning_rate": 5.340413278396896e-06, |
| "loss": 0.6997, |
| "step": 503 |
| }, |
| { |
| "epoch": 1.5915789473684212, |
| "grad_norm": 0.7005538940429688, |
| "learning_rate": 5.322038776909705e-06, |
| "loss": 0.7041, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.594736842105263, |
| "grad_norm": 0.6432095766067505, |
| "learning_rate": 5.303659907153654e-06, |
| "loss": 0.6719, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.5978947368421053, |
| "grad_norm": 0.5566903352737427, |
| "learning_rate": 5.285276918427432e-06, |
| "loss": 0.6733, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.6010526315789475, |
| "grad_norm": 0.5807555317878723, |
| "learning_rate": 5.2668900600855955e-06, |
| "loss": 0.7036, |
| "step": 507 |
| }, |
| { |
| "epoch": 1.6042105263157893, |
| "grad_norm": 0.5966797471046448, |
| "learning_rate": 5.248499581535193e-06, |
| "loss": 0.682, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.6073684210526316, |
| "grad_norm": 0.646507740020752, |
| "learning_rate": 5.2301057322323786e-06, |
| "loss": 0.7059, |
| "step": 509 |
| }, |
| { |
| "epoch": 1.6105263157894738, |
| "grad_norm": 0.6112558841705322, |
| "learning_rate": 5.211708761679031e-06, |
| "loss": 0.6732, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.6136842105263158, |
| "grad_norm": 0.6396105885505676, |
| "learning_rate": 5.193308919419363e-06, |
| "loss": 0.7091, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.6168421052631579, |
| "grad_norm": 0.678657054901123, |
| "learning_rate": 5.1749064550365414e-06, |
| "loss": 0.6975, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.62, |
| "grad_norm": 0.6183331608772278, |
| "learning_rate": 5.156501618149301e-06, |
| "loss": 0.6832, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.6231578947368421, |
| "grad_norm": 0.6091780066490173, |
| "learning_rate": 5.13809465840856e-06, |
| "loss": 0.6911, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.6263157894736842, |
| "grad_norm": 0.6304209232330322, |
| "learning_rate": 5.11968582549403e-06, |
| "loss": 0.6461, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.6294736842105264, |
| "grad_norm": 0.6831485629081726, |
| "learning_rate": 5.10127536911083e-06, |
| "loss": 0.6757, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.6326315789473684, |
| "grad_norm": 0.6476657390594482, |
| "learning_rate": 5.082863538986103e-06, |
| "loss": 0.6935, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.6357894736842105, |
| "grad_norm": 0.6213446259498596, |
| "learning_rate": 5.064450584865624e-06, |
| "loss": 0.668, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.6389473684210527, |
| "grad_norm": 0.6051395535469055, |
| "learning_rate": 5.046036756510417e-06, |
| "loss": 0.6926, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.6421052631578947, |
| "grad_norm": 0.6724497079849243, |
| "learning_rate": 5.027622303693363e-06, |
| "loss": 0.6912, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.6452631578947368, |
| "grad_norm": 0.6436070799827576, |
| "learning_rate": 5.0092074761958085e-06, |
| "loss": 0.6791, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.648421052631579, |
| "grad_norm": 0.602382242679596, |
| "learning_rate": 4.990792523804192e-06, |
| "loss": 0.7104, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.651578947368421, |
| "grad_norm": 0.6710056662559509, |
| "learning_rate": 4.972377696306639e-06, |
| "loss": 0.6412, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.654736842105263, |
| "grad_norm": 0.6339631676673889, |
| "learning_rate": 4.953963243489583e-06, |
| "loss": 0.696, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.6578947368421053, |
| "grad_norm": 0.6232373118400574, |
| "learning_rate": 4.935549415134376e-06, |
| "loss": 0.6824, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.6610526315789473, |
| "grad_norm": 0.5875207781791687, |
| "learning_rate": 4.9171364610139e-06, |
| "loss": 0.7114, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.6642105263157894, |
| "grad_norm": 0.6042991876602173, |
| "learning_rate": 4.898724630889172e-06, |
| "loss": 0.6792, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.6673684210526316, |
| "grad_norm": 0.6709006428718567, |
| "learning_rate": 4.880314174505972e-06, |
| "loss": 0.6597, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.6705263157894736, |
| "grad_norm": 0.6271777153015137, |
| "learning_rate": 4.861905341591442e-06, |
| "loss": 0.6763, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.6736842105263157, |
| "grad_norm": 0.6083865165710449, |
| "learning_rate": 4.843498381850701e-06, |
| "loss": 0.6882, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.676842105263158, |
| "grad_norm": 0.544291079044342, |
| "learning_rate": 4.82509354496346e-06, |
| "loss": 0.705, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.6800000000000002, |
| "grad_norm": 0.6035980582237244, |
| "learning_rate": 4.8066910805806384e-06, |
| "loss": 0.6771, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.683157894736842, |
| "grad_norm": 0.6175979971885681, |
| "learning_rate": 4.78829123832097e-06, |
| "loss": 0.7164, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.6863157894736842, |
| "grad_norm": 0.6440612077713013, |
| "learning_rate": 4.769894267767621e-06, |
| "loss": 0.6812, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.6894736842105265, |
| "grad_norm": 0.6156215071678162, |
| "learning_rate": 4.751500418464809e-06, |
| "loss": 0.6916, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.6926315789473683, |
| "grad_norm": 0.5670990943908691, |
| "learning_rate": 4.733109939914407e-06, |
| "loss": 0.6901, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.6957894736842105, |
| "grad_norm": 0.6149737238883972, |
| "learning_rate": 4.714723081572571e-06, |
| "loss": 0.6412, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.6989473684210528, |
| "grad_norm": 0.6131264567375183, |
| "learning_rate": 4.696340092846347e-06, |
| "loss": 0.6881, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.7021052631578948, |
| "grad_norm": 0.5654208660125732, |
| "learning_rate": 4.677961223090297e-06, |
| "loss": 0.7101, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.7052631578947368, |
| "grad_norm": 0.6652705073356628, |
| "learning_rate": 4.659586721603107e-06, |
| "loss": 0.7, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.708421052631579, |
| "grad_norm": 0.6081032752990723, |
| "learning_rate": 4.641216837624211e-06, |
| "loss": 0.7095, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.711578947368421, |
| "grad_norm": 0.5956637263298035, |
| "learning_rate": 4.622851820330412e-06, |
| "loss": 0.6858, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.7147368421052631, |
| "grad_norm": 0.6079306602478027, |
| "learning_rate": 4.604491918832494e-06, |
| "loss": 0.6929, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.7178947368421054, |
| "grad_norm": 0.5816709399223328, |
| "learning_rate": 4.586137382171856e-06, |
| "loss": 0.6681, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.7210526315789474, |
| "grad_norm": 0.6434882879257202, |
| "learning_rate": 4.567788459317116e-06, |
| "loss": 0.6812, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.7242105263157894, |
| "grad_norm": 0.6063170433044434, |
| "learning_rate": 4.54944539916075e-06, |
| "loss": 0.6832, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.7273684210526317, |
| "grad_norm": 0.6341161727905273, |
| "learning_rate": 4.531108450515712e-06, |
| "loss": 0.692, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.7305263157894737, |
| "grad_norm": 0.6072157621383667, |
| "learning_rate": 4.512777862112053e-06, |
| "loss": 0.6582, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.7336842105263157, |
| "grad_norm": 0.5953732132911682, |
| "learning_rate": 4.494453882593552e-06, |
| "loss": 0.663, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.736842105263158, |
| "grad_norm": 0.6301018595695496, |
| "learning_rate": 4.476136760514341e-06, |
| "loss": 0.6686, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.74, |
| "grad_norm": 0.628161609172821, |
| "learning_rate": 4.457826744335538e-06, |
| "loss": 0.6909, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.743157894736842, |
| "grad_norm": 0.5877235531806946, |
| "learning_rate": 4.439524082421872e-06, |
| "loss": 0.6515, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.7463157894736843, |
| "grad_norm": 0.6606840491294861, |
| "learning_rate": 4.421229023038316e-06, |
| "loss": 0.6738, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.7494736842105263, |
| "grad_norm": 0.5608782768249512, |
| "learning_rate": 4.402941814346716e-06, |
| "loss": 0.6991, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.7526315789473683, |
| "grad_norm": 0.5692189335823059, |
| "learning_rate": 4.384662704402433e-06, |
| "loss": 0.6547, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.7557894736842106, |
| "grad_norm": 0.589918315410614, |
| "learning_rate": 4.366391941150969e-06, |
| "loss": 0.691, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.7589473684210526, |
| "grad_norm": 0.5952433943748474, |
| "learning_rate": 4.34812977242461e-06, |
| "loss": 0.6866, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.7621052631578946, |
| "grad_norm": 0.6127429008483887, |
| "learning_rate": 4.329876445939062e-06, |
| "loss": 0.7041, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.7652631578947369, |
| "grad_norm": 0.6347705125808716, |
| "learning_rate": 4.3116322092900925e-06, |
| "loss": 0.6565, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.768421052631579, |
| "grad_norm": 0.5670740604400635, |
| "learning_rate": 4.293397309950168e-06, |
| "loss": 0.7044, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.771578947368421, |
| "grad_norm": 0.6550533175468445, |
| "learning_rate": 4.275171995265101e-06, |
| "loss": 0.6867, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.7747368421052632, |
| "grad_norm": 0.5752691030502319, |
| "learning_rate": 4.256956512450697e-06, |
| "loss": 0.7015, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.7778947368421054, |
| "grad_norm": 0.6235054731369019, |
| "learning_rate": 4.238751108589389e-06, |
| "loss": 0.6954, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.7810526315789472, |
| "grad_norm": 0.6959113478660583, |
| "learning_rate": 4.220556030626904e-06, |
| "loss": 0.6945, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.7842105263157895, |
| "grad_norm": 0.5536004304885864, |
| "learning_rate": 4.202371525368899e-06, |
| "loss": 0.6849, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.7873684210526317, |
| "grad_norm": 0.5802682042121887, |
| "learning_rate": 4.184197839477622e-06, |
| "loss": 0.6898, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.7905263157894735, |
| "grad_norm": 0.6382439136505127, |
| "learning_rate": 4.166035219468561e-06, |
| "loss": 0.6784, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.7936842105263158, |
| "grad_norm": 0.5798212289810181, |
| "learning_rate": 4.1478839117071e-06, |
| "loss": 0.7353, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.796842105263158, |
| "grad_norm": 0.6376686096191406, |
| "learning_rate": 4.12974416240518e-06, |
| "loss": 0.665, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.566200852394104, |
| "learning_rate": 4.11161621761796e-06, |
| "loss": 0.6689, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.803157894736842, |
| "grad_norm": 0.6278012990951538, |
| "learning_rate": 4.093500323240479e-06, |
| "loss": 0.6716, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.8063157894736843, |
| "grad_norm": 0.5599969625473022, |
| "learning_rate": 4.075396725004308e-06, |
| "loss": 0.6789, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.8094736842105263, |
| "grad_norm": 0.6141947507858276, |
| "learning_rate": 4.057305668474244e-06, |
| "loss": 0.6696, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.8126315789473684, |
| "grad_norm": 0.6866646409034729, |
| "learning_rate": 4.039227399044952e-06, |
| "loss": 0.7151, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.8157894736842106, |
| "grad_norm": 0.5738588571548462, |
| "learning_rate": 4.021162161937653e-06, |
| "loss": 0.6942, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.8189473684210526, |
| "grad_norm": 0.6379348039627075, |
| "learning_rate": 4.003110202196787e-06, |
| "loss": 0.6812, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.8221052631578947, |
| "grad_norm": 0.6298931837081909, |
| "learning_rate": 3.985071764686701e-06, |
| "loss": 0.6711, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.825263157894737, |
| "grad_norm": 0.6079388856887817, |
| "learning_rate": 3.9670470940883144e-06, |
| "loss": 0.6989, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.828421052631579, |
| "grad_norm": 0.5470216870307922, |
| "learning_rate": 3.94903643489581e-06, |
| "loss": 0.7104, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.831578947368421, |
| "grad_norm": 0.5332590341567993, |
| "learning_rate": 3.931040031413313e-06, |
| "loss": 0.6709, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.8347368421052632, |
| "grad_norm": 0.5865632891654968, |
| "learning_rate": 3.913058127751578e-06, |
| "loss": 0.6886, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.8378947368421052, |
| "grad_norm": 0.55133056640625, |
| "learning_rate": 3.895090967824678e-06, |
| "loss": 0.6543, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.8410526315789473, |
| "grad_norm": 0.5989318490028381, |
| "learning_rate": 3.877138795346697e-06, |
| "loss": 0.64, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.8442105263157895, |
| "grad_norm": 0.7691587805747986, |
| "learning_rate": 3.85920185382842e-06, |
| "loss": 0.6826, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.8473684210526315, |
| "grad_norm": 0.6429767608642578, |
| "learning_rate": 3.841280386574037e-06, |
| "loss": 0.7013, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.8505263157894736, |
| "grad_norm": 0.6208791732788086, |
| "learning_rate": 3.823374636677837e-06, |
| "loss": 0.6459, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.8536842105263158, |
| "grad_norm": 0.5830709934234619, |
| "learning_rate": 3.8054848470209094e-06, |
| "loss": 0.678, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.8568421052631578, |
| "grad_norm": 0.7179412245750427, |
| "learning_rate": 3.7876112602678544e-06, |
| "loss": 0.7003, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.8599999999999999, |
| "grad_norm": 0.72615647315979, |
| "learning_rate": 3.7697541188634934e-06, |
| "loss": 0.6505, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.8631578947368421, |
| "grad_norm": 0.6515791416168213, |
| "learning_rate": 3.7519136650295673e-06, |
| "loss": 0.7111, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.8663157894736844, |
| "grad_norm": 0.6129830479621887, |
| "learning_rate": 3.734090140761466e-06, |
| "loss": 0.6769, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.8694736842105262, |
| "grad_norm": 0.6099033951759338, |
| "learning_rate": 3.716283787824939e-06, |
| "loss": 0.6742, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.8726315789473684, |
| "grad_norm": 0.6122466325759888, |
| "learning_rate": 3.698494847752816e-06, |
| "loss": 0.6786, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.8757894736842107, |
| "grad_norm": 0.6633381843566895, |
| "learning_rate": 3.6807235618417314e-06, |
| "loss": 0.6878, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.8789473684210525, |
| "grad_norm": 0.669661819934845, |
| "learning_rate": 3.6629701711488485e-06, |
| "loss": 0.6794, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.8821052631578947, |
| "grad_norm": 0.5703924894332886, |
| "learning_rate": 3.645234916488599e-06, |
| "loss": 0.6814, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.885263157894737, |
| "grad_norm": 0.6017104387283325, |
| "learning_rate": 3.6275180384294033e-06, |
| "loss": 0.6442, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.888421052631579, |
| "grad_norm": 0.5789917707443237, |
| "learning_rate": 3.609819777290418e-06, |
| "loss": 0.6669, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.891578947368421, |
| "grad_norm": 0.6235988140106201, |
| "learning_rate": 3.5921403731382685e-06, |
| "loss": 0.636, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.8947368421052633, |
| "grad_norm": 0.6223317384719849, |
| "learning_rate": 3.5744800657837984e-06, |
| "loss": 0.6911, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.8978947368421053, |
| "grad_norm": 0.6065788269042969, |
| "learning_rate": 3.556839094778814e-06, |
| "loss": 0.6495, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.9010526315789473, |
| "grad_norm": 0.6049926280975342, |
| "learning_rate": 3.5392176994128357e-06, |
| "loss": 0.7152, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.9042105263157896, |
| "grad_norm": 0.5944041609764099, |
| "learning_rate": 3.5216161187098497e-06, |
| "loss": 0.6835, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.9073684210526316, |
| "grad_norm": 0.6229305863380432, |
| "learning_rate": 3.504034591425071e-06, |
| "loss": 0.674, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.9105263157894736, |
| "grad_norm": 0.6037994623184204, |
| "learning_rate": 3.4864733560416998e-06, |
| "loss": 0.6526, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.9136842105263159, |
| "grad_norm": 0.6101519465446472, |
| "learning_rate": 3.468932650767689e-06, |
| "loss": 0.6693, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.916842105263158, |
| "grad_norm": 0.5631049871444702, |
| "learning_rate": 3.4514127135325105e-06, |
| "loss": 0.6967, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.92, |
| "grad_norm": 0.5985934734344482, |
| "learning_rate": 3.433913781983932e-06, |
| "loss": 0.6955, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.9231578947368422, |
| "grad_norm": 0.5818445086479187, |
| "learning_rate": 3.4164360934847912e-06, |
| "loss": 0.6642, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.9263157894736842, |
| "grad_norm": 0.6571829319000244, |
| "learning_rate": 3.3989798851097744e-06, |
| "loss": 0.6848, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.9294736842105262, |
| "grad_norm": 0.5725364685058594, |
| "learning_rate": 3.381545393642205e-06, |
| "loss": 0.6863, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.9326315789473685, |
| "grad_norm": 0.6822765469551086, |
| "learning_rate": 3.3641328555708286e-06, |
| "loss": 0.7075, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.9357894736842105, |
| "grad_norm": 0.6487634778022766, |
| "learning_rate": 3.3467425070866034e-06, |
| "loss": 0.675, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.9389473684210525, |
| "grad_norm": 0.6720603108406067, |
| "learning_rate": 3.3293745840795004e-06, |
| "loss": 0.6709, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.9421052631578948, |
| "grad_norm": 0.5721080303192139, |
| "learning_rate": 3.312029322135306e-06, |
| "loss": 0.6994, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.9452631578947368, |
| "grad_norm": 0.6841775178909302, |
| "learning_rate": 3.2947069565324134e-06, |
| "loss": 0.6535, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.9484210526315788, |
| "grad_norm": 0.6342321038246155, |
| "learning_rate": 3.2774077222386465e-06, |
| "loss": 0.6453, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.951578947368421, |
| "grad_norm": 0.667797863483429, |
| "learning_rate": 3.260131853908066e-06, |
| "loss": 0.7042, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.954736842105263, |
| "grad_norm": 0.6030858755111694, |
| "learning_rate": 3.2428795858777873e-06, |
| "loss": 0.6576, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.9578947368421051, |
| "grad_norm": 0.6035648584365845, |
| "learning_rate": 3.225651152164799e-06, |
| "loss": 0.7307, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.9610526315789474, |
| "grad_norm": 0.6750203371047974, |
| "learning_rate": 3.208446786462791e-06, |
| "loss": 0.7058, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.9642105263157896, |
| "grad_norm": 0.6008321046829224, |
| "learning_rate": 3.1912667221389892e-06, |
| "loss": 0.6851, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.9673684210526314, |
| "grad_norm": 0.6270667910575867, |
| "learning_rate": 3.1741111922309797e-06, |
| "loss": 0.6851, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.9705263157894737, |
| "grad_norm": 0.5572888255119324, |
| "learning_rate": 3.156980429443559e-06, |
| "loss": 0.6896, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.973684210526316, |
| "grad_norm": 0.6308818459510803, |
| "learning_rate": 3.1398746661455647e-06, |
| "loss": 0.7103, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.9768421052631577, |
| "grad_norm": 0.6266995668411255, |
| "learning_rate": 3.122794134366738e-06, |
| "loss": 0.6679, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.98, |
| "grad_norm": 0.5579388737678528, |
| "learning_rate": 3.105739065794565e-06, |
| "loss": 0.677, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.9831578947368422, |
| "grad_norm": 0.6134604215621948, |
| "learning_rate": 3.0887096917711408e-06, |
| "loss": 0.6466, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.9863157894736843, |
| "grad_norm": 0.6037647128105164, |
| "learning_rate": 3.071706243290026e-06, |
| "loss": 0.6906, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.9894736842105263, |
| "grad_norm": 0.636805534362793, |
| "learning_rate": 3.0547289509931194e-06, |
| "loss": 0.6838, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.9926315789473685, |
| "grad_norm": 0.6072667837142944, |
| "learning_rate": 3.0377780451675243e-06, |
| "loss": 0.6751, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.9957894736842106, |
| "grad_norm": 0.5715926885604858, |
| "learning_rate": 3.020853755742428e-06, |
| "loss": 0.6666, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.9989473684210526, |
| "grad_norm": 0.634506106376648, |
| "learning_rate": 3.0039563122859815e-06, |
| "loss": 0.706, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.002105263157895, |
| "grad_norm": 0.6592942476272583, |
| "learning_rate": 2.9870859440021845e-06, |
| "loss": 0.6761, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.0052631578947366, |
| "grad_norm": 0.7126978635787964, |
| "learning_rate": 2.970242879727778e-06, |
| "loss": 0.6233, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.008421052631579, |
| "grad_norm": 0.5990601778030396, |
| "learning_rate": 2.953427347929142e-06, |
| "loss": 0.6303, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.011578947368421, |
| "grad_norm": 0.5881582498550415, |
| "learning_rate": 2.936639576699194e-06, |
| "loss": 0.5866, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.014736842105263, |
| "grad_norm": 0.6993144750595093, |
| "learning_rate": 2.9198797937542935e-06, |
| "loss": 0.6256, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.017894736842105, |
| "grad_norm": 0.6200084686279297, |
| "learning_rate": 2.903148226431155e-06, |
| "loss": 0.6453, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.0210526315789474, |
| "grad_norm": 0.6157467365264893, |
| "learning_rate": 2.8864451016837703e-06, |
| "loss": 0.606, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.0242105263157897, |
| "grad_norm": 0.6459230780601501, |
| "learning_rate": 2.869770646080316e-06, |
| "loss": 0.6267, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.0273684210526315, |
| "grad_norm": 0.7284769415855408, |
| "learning_rate": 2.853125085800096e-06, |
| "loss": 0.6036, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.0305263157894737, |
| "grad_norm": 0.65320885181427, |
| "learning_rate": 2.836508646630457e-06, |
| "loss": 0.6798, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.033684210526316, |
| "grad_norm": 0.6911686658859253, |
| "learning_rate": 2.8199215539637427e-06, |
| "loss": 0.6431, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.036842105263158, |
| "grad_norm": 0.6208088994026184, |
| "learning_rate": 2.8033640327942235e-06, |
| "loss": 0.673, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.04, |
| "grad_norm": 0.6126965284347534, |
| "learning_rate": 2.786836307715056e-06, |
| "loss": 0.6152, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.0431578947368423, |
| "grad_norm": 0.7363206148147583, |
| "learning_rate": 2.7703386029152246e-06, |
| "loss": 0.6492, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.046315789473684, |
| "grad_norm": 0.6911171078681946, |
| "learning_rate": 2.753871142176506e-06, |
| "loss": 0.6315, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.0494736842105263, |
| "grad_norm": 0.6493732929229736, |
| "learning_rate": 2.737434148870437e-06, |
| "loss": 0.6249, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.0526315789473686, |
| "grad_norm": 0.6811050772666931, |
| "learning_rate": 2.7210278459552786e-06, |
| "loss": 0.6038, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.0557894736842104, |
| "grad_norm": 0.6253058910369873, |
| "learning_rate": 2.704652455972997e-06, |
| "loss": 0.6369, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.0589473684210526, |
| "grad_norm": 0.5684403777122498, |
| "learning_rate": 2.688308201046236e-06, |
| "loss": 0.6551, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.062105263157895, |
| "grad_norm": 0.6057170629501343, |
| "learning_rate": 2.6719953028753214e-06, |
| "loss": 0.5905, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.0652631578947367, |
| "grad_norm": 0.603461742401123, |
| "learning_rate": 2.655713982735234e-06, |
| "loss": 0.6049, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.068421052631579, |
| "grad_norm": 0.5477900505065918, |
| "learning_rate": 2.6394644614726215e-06, |
| "loss": 0.6091, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.071578947368421, |
| "grad_norm": 0.6187466382980347, |
| "learning_rate": 2.623246959502795e-06, |
| "loss": 0.6247, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.074736842105263, |
| "grad_norm": 0.5489943623542786, |
| "learning_rate": 2.6070616968067446e-06, |
| "loss": 0.6522, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.0778947368421052, |
| "grad_norm": 0.5987683534622192, |
| "learning_rate": 2.5909088929281534e-06, |
| "loss": 0.613, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.0810526315789475, |
| "grad_norm": 0.5765239000320435, |
| "learning_rate": 2.574788766970418e-06, |
| "loss": 0.6277, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.0842105263157893, |
| "grad_norm": 0.566733717918396, |
| "learning_rate": 2.55870153759368e-06, |
| "loss": 0.5948, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.0873684210526315, |
| "grad_norm": 0.5783083438873291, |
| "learning_rate": 2.542647423011857e-06, |
| "loss": 0.6302, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.090526315789474, |
| "grad_norm": 0.6213259696960449, |
| "learning_rate": 2.526626640989683e-06, |
| "loss": 0.607, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.0936842105263156, |
| "grad_norm": 0.5684741735458374, |
| "learning_rate": 2.510639408839757e-06, |
| "loss": 0.6122, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.096842105263158, |
| "grad_norm": 0.5746645927429199, |
| "learning_rate": 2.4946859434195904e-06, |
| "loss": 0.6336, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.1, |
| "grad_norm": 0.554362416267395, |
| "learning_rate": 2.478766461128672e-06, |
| "loss": 0.6155, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.103157894736842, |
| "grad_norm": 0.5717495679855347, |
| "learning_rate": 2.4628811779055277e-06, |
| "loss": 0.6356, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.106315789473684, |
| "grad_norm": 0.5523346662521362, |
| "learning_rate": 2.4470303092247926e-06, |
| "loss": 0.5855, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.1094736842105264, |
| "grad_norm": 0.5319191813468933, |
| "learning_rate": 2.431214070094289e-06, |
| "loss": 0.5865, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.1126315789473686, |
| "grad_norm": 0.52125483751297, |
| "learning_rate": 2.4154326750521084e-06, |
| "loss": 0.6155, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.1157894736842104, |
| "grad_norm": 0.5644091963768005, |
| "learning_rate": 2.3996863381637046e-06, |
| "loss": 0.5797, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.1189473684210527, |
| "grad_norm": 0.5224770307540894, |
| "learning_rate": 2.383975273018986e-06, |
| "loss": 0.6275, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.122105263157895, |
| "grad_norm": 0.5387361645698547, |
| "learning_rate": 2.3682996927294216e-06, |
| "loss": 0.5888, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.1252631578947367, |
| "grad_norm": 0.6090015172958374, |
| "learning_rate": 2.3526598099251473e-06, |
| "loss": 0.5825, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.128421052631579, |
| "grad_norm": 0.5374984741210938, |
| "learning_rate": 2.3370558367520856e-06, |
| "loss": 0.6262, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.1315789473684212, |
| "grad_norm": 0.581964373588562, |
| "learning_rate": 2.321487984869064e-06, |
| "loss": 0.5949, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.134736842105263, |
| "grad_norm": 0.5389418601989746, |
| "learning_rate": 2.305956465444945e-06, |
| "loss": 0.6104, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.1378947368421053, |
| "grad_norm": 0.5349326729774475, |
| "learning_rate": 2.290461489155768e-06, |
| "loss": 0.6828, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.1410526315789475, |
| "grad_norm": 0.5772843360900879, |
| "learning_rate": 2.275003266181877e-06, |
| "loss": 0.5864, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.1442105263157893, |
| "grad_norm": 0.6210830211639404, |
| "learning_rate": 2.2595820062050854e-06, |
| "loss": 0.6047, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.1473684210526316, |
| "grad_norm": 0.5432112812995911, |
| "learning_rate": 2.2441979184058223e-06, |
| "loss": 0.6402, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.150526315789474, |
| "grad_norm": 0.5438613891601562, |
| "learning_rate": 2.2288512114602986e-06, |
| "loss": 0.5905, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.1536842105263156, |
| "grad_norm": 0.5581168532371521, |
| "learning_rate": 2.213542093537675e-06, |
| "loss": 0.5887, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.156842105263158, |
| "grad_norm": 0.5446808338165283, |
| "learning_rate": 2.1982707722972383e-06, |
| "loss": 0.6573, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.16, |
| "grad_norm": 0.6228784322738647, |
| "learning_rate": 2.1830374548855905e-06, |
| "loss": 0.6083, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.163157894736842, |
| "grad_norm": 0.577155351638794, |
| "learning_rate": 2.167842347933826e-06, |
| "loss": 0.614, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.166315789473684, |
| "grad_norm": 0.560425877571106, |
| "learning_rate": 2.1526856575547444e-06, |
| "loss": 0.6321, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.1694736842105264, |
| "grad_norm": 0.547867476940155, |
| "learning_rate": 2.1375675893400373e-06, |
| "loss": 0.6192, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.1726315789473682, |
| "grad_norm": 0.5950310826301575, |
| "learning_rate": 2.1224883483575166e-06, |
| "loss": 0.6292, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.1757894736842105, |
| "grad_norm": 0.595939576625824, |
| "learning_rate": 2.1074481391483233e-06, |
| "loss": 0.6098, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.1789473684210527, |
| "grad_norm": 0.6173756122589111, |
| "learning_rate": 2.0924471657241526e-06, |
| "loss": 0.6209, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.1821052631578945, |
| "grad_norm": 0.5339811444282532, |
| "learning_rate": 2.0774856315644955e-06, |
| "loss": 0.6303, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.185263157894737, |
| "grad_norm": 0.5910825133323669, |
| "learning_rate": 2.0625637396138666e-06, |
| "loss": 0.578, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.188421052631579, |
| "grad_norm": 0.574038565158844, |
| "learning_rate": 2.0476816922790575e-06, |
| "loss": 0.5743, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.191578947368421, |
| "grad_norm": 0.537571370601654, |
| "learning_rate": 2.0328396914263925e-06, |
| "loss": 0.6386, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.194736842105263, |
| "grad_norm": 0.562436580657959, |
| "learning_rate": 2.0180379383789907e-06, |
| "loss": 0.6322, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.1978947368421053, |
| "grad_norm": 0.5552737712860107, |
| "learning_rate": 2.0032766339140246e-06, |
| "loss": 0.6069, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.201052631578947, |
| "grad_norm": 0.5092212557792664, |
| "learning_rate": 1.988555978260013e-06, |
| "loss": 0.6607, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.2042105263157894, |
| "grad_norm": 0.558074414730072, |
| "learning_rate": 1.973876171094097e-06, |
| "loss": 0.6178, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.2073684210526316, |
| "grad_norm": 0.5705953240394592, |
| "learning_rate": 1.9592374115393293e-06, |
| "loss": 0.6321, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.2105263157894735, |
| "grad_norm": 0.5584784150123596, |
| "learning_rate": 1.9446398981619757e-06, |
| "loss": 0.5915, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.2136842105263157, |
| "grad_norm": 0.5504412055015564, |
| "learning_rate": 1.9300838289688216e-06, |
| "loss": 0.6143, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.216842105263158, |
| "grad_norm": 0.5174974203109741, |
| "learning_rate": 1.915569401404488e-06, |
| "loss": 0.5982, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.22, |
| "grad_norm": 0.5328599810600281, |
| "learning_rate": 1.9010968123487478e-06, |
| "loss": 0.6022, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.223157894736842, |
| "grad_norm": 0.5421804785728455, |
| "learning_rate": 1.8866662581138646e-06, |
| "loss": 0.5817, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.2263157894736842, |
| "grad_norm": 0.57915198802948, |
| "learning_rate": 1.8722779344419139e-06, |
| "loss": 0.613, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.2294736842105265, |
| "grad_norm": 0.5778079628944397, |
| "learning_rate": 1.8579320365021508e-06, |
| "loss": 0.6301, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.2326315789473683, |
| "grad_norm": 0.5267239212989807, |
| "learning_rate": 1.8436287588883416e-06, |
| "loss": 0.6269, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.2357894736842105, |
| "grad_norm": 0.5386923551559448, |
| "learning_rate": 1.8293682956161357e-06, |
| "loss": 0.5903, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.238947368421053, |
| "grad_norm": 0.5258237719535828, |
| "learning_rate": 1.8151508401204298e-06, |
| "loss": 0.6086, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.2421052631578946, |
| "grad_norm": 0.5495336055755615, |
| "learning_rate": 1.800976585252745e-06, |
| "loss": 0.6247, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.245263157894737, |
| "grad_norm": 0.55051589012146, |
| "learning_rate": 1.7868457232786117e-06, |
| "loss": 0.5962, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.248421052631579, |
| "grad_norm": 0.5342631936073303, |
| "learning_rate": 1.7727584458749608e-06, |
| "loss": 0.587, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.251578947368421, |
| "grad_norm": 0.5018291473388672, |
| "learning_rate": 1.7587149441275236e-06, |
| "loss": 0.6476, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.254736842105263, |
| "grad_norm": 0.5503069758415222, |
| "learning_rate": 1.7447154085282398e-06, |
| "loss": 0.613, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.2578947368421054, |
| "grad_norm": 0.5181307196617126, |
| "learning_rate": 1.7307600289726745e-06, |
| "loss": 0.6221, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.261052631578947, |
| "grad_norm": 0.5215415358543396, |
| "learning_rate": 1.7168489947574407e-06, |
| "loss": 0.6311, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.2642105263157895, |
| "grad_norm": 0.559654712677002, |
| "learning_rate": 1.7029824945776346e-06, |
| "loss": 0.5764, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.2673684210526317, |
| "grad_norm": 0.5514314770698547, |
| "learning_rate": 1.6891607165242718e-06, |
| "loss": 0.629, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.2705263157894735, |
| "grad_norm": 0.4893876910209656, |
| "learning_rate": 1.6753838480817397e-06, |
| "loss": 0.6328, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.2736842105263158, |
| "grad_norm": 0.5276650190353394, |
| "learning_rate": 1.661652076125252e-06, |
| "loss": 0.5912, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.276842105263158, |
| "grad_norm": 0.5329832434654236, |
| "learning_rate": 1.6479655869183142e-06, |
| "loss": 0.61, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.2800000000000002, |
| "grad_norm": 0.535116970539093, |
| "learning_rate": 1.6343245661102031e-06, |
| "loss": 0.6346, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.283157894736842, |
| "grad_norm": 0.5258433818817139, |
| "learning_rate": 1.620729198733434e-06, |
| "loss": 0.6336, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.2863157894736843, |
| "grad_norm": 0.5541167259216309, |
| "learning_rate": 1.6071796692012663e-06, |
| "loss": 0.6221, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.2894736842105265, |
| "grad_norm": 0.5115761160850525, |
| "learning_rate": 1.5936761613051937e-06, |
| "loss": 0.6489, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.2926315789473684, |
| "grad_norm": 0.5598406195640564, |
| "learning_rate": 1.580218858212454e-06, |
| "loss": 0.6, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.2957894736842106, |
| "grad_norm": 0.5273876190185547, |
| "learning_rate": 1.5668079424635424e-06, |
| "loss": 0.6227, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.298947368421053, |
| "grad_norm": 0.5451186299324036, |
| "learning_rate": 1.5534435959697363e-06, |
| "loss": 0.5881, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.3021052631578947, |
| "grad_norm": 0.5124930739402771, |
| "learning_rate": 1.5401260000106321e-06, |
| "loss": 0.6739, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.305263157894737, |
| "grad_norm": 0.5419645309448242, |
| "learning_rate": 1.526855335231679e-06, |
| "loss": 0.6209, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.308421052631579, |
| "grad_norm": 0.5517361760139465, |
| "learning_rate": 1.5136317816417333e-06, |
| "loss": 0.6345, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.311578947368421, |
| "grad_norm": 0.5226176977157593, |
| "learning_rate": 1.5004555186106124e-06, |
| "loss": 0.6008, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.314736842105263, |
| "grad_norm": 0.5727024078369141, |
| "learning_rate": 1.487326724866668e-06, |
| "loss": 0.5842, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.3178947368421055, |
| "grad_norm": 0.5469027161598206, |
| "learning_rate": 1.4742455784943576e-06, |
| "loss": 0.6151, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.3210526315789473, |
| "grad_norm": 0.5690147280693054, |
| "learning_rate": 1.4612122569318282e-06, |
| "loss": 0.566, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.3242105263157895, |
| "grad_norm": 0.5304024815559387, |
| "learning_rate": 1.448226936968517e-06, |
| "loss": 0.6047, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.3273684210526318, |
| "grad_norm": 0.5012169480323792, |
| "learning_rate": 1.4352897947427396e-06, |
| "loss": 0.6352, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.3305263157894736, |
| "grad_norm": 0.49926042556762695, |
| "learning_rate": 1.422401005739314e-06, |
| "loss": 0.6673, |
| "step": 738 |
| }, |
| { |
| "epoch": 2.333684210526316, |
| "grad_norm": 0.5833967924118042, |
| "learning_rate": 1.4095607447871711e-06, |
| "loss": 0.5515, |
| "step": 739 |
| }, |
| { |
| "epoch": 2.336842105263158, |
| "grad_norm": 0.6136566996574402, |
| "learning_rate": 1.3967691860569915e-06, |
| "loss": 0.5947, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.34, |
| "grad_norm": 0.5746322274208069, |
| "learning_rate": 1.3840265030588323e-06, |
| "loss": 0.6308, |
| "step": 741 |
| }, |
| { |
| "epoch": 2.343157894736842, |
| "grad_norm": 0.5507466197013855, |
| "learning_rate": 1.3713328686397832e-06, |
| "loss": 0.6041, |
| "step": 742 |
| }, |
| { |
| "epoch": 2.3463157894736844, |
| "grad_norm": 0.524361252784729, |
| "learning_rate": 1.358688454981621e-06, |
| "loss": 0.5928, |
| "step": 743 |
| }, |
| { |
| "epoch": 2.349473684210526, |
| "grad_norm": 0.5451714396476746, |
| "learning_rate": 1.3460934335984677e-06, |
| "loss": 0.6086, |
| "step": 744 |
| }, |
| { |
| "epoch": 2.3526315789473684, |
| "grad_norm": 0.5549687147140503, |
| "learning_rate": 1.3335479753344688e-06, |
| "loss": 0.6241, |
| "step": 745 |
| }, |
| { |
| "epoch": 2.3557894736842107, |
| "grad_norm": 0.5424072742462158, |
| "learning_rate": 1.3210522503614753e-06, |
| "loss": 0.6078, |
| "step": 746 |
| }, |
| { |
| "epoch": 2.3589473684210525, |
| "grad_norm": 0.5681266188621521, |
| "learning_rate": 1.3086064281767346e-06, |
| "loss": 0.5994, |
| "step": 747 |
| }, |
| { |
| "epoch": 2.3621052631578947, |
| "grad_norm": 0.5240947008132935, |
| "learning_rate": 1.2962106776005917e-06, |
| "loss": 0.6148, |
| "step": 748 |
| }, |
| { |
| "epoch": 2.365263157894737, |
| "grad_norm": 0.5335183143615723, |
| "learning_rate": 1.2838651667742014e-06, |
| "loss": 0.6244, |
| "step": 749 |
| }, |
| { |
| "epoch": 2.3684210526315788, |
| "grad_norm": 0.5286924839019775, |
| "learning_rate": 1.2715700631572387e-06, |
| "loss": 0.6306, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.371578947368421, |
| "grad_norm": 0.527306854724884, |
| "learning_rate": 1.2593255335256438e-06, |
| "loss": 0.64, |
| "step": 751 |
| }, |
| { |
| "epoch": 2.3747368421052633, |
| "grad_norm": 0.5724322199821472, |
| "learning_rate": 1.2471317439693436e-06, |
| "loss": 0.6075, |
| "step": 752 |
| }, |
| { |
| "epoch": 2.377894736842105, |
| "grad_norm": 0.5209251046180725, |
| "learning_rate": 1.2349888598900078e-06, |
| "loss": 0.5929, |
| "step": 753 |
| }, |
| { |
| "epoch": 2.3810526315789473, |
| "grad_norm": 0.53974848985672, |
| "learning_rate": 1.2228970459988015e-06, |
| "loss": 0.5841, |
| "step": 754 |
| }, |
| { |
| "epoch": 2.3842105263157896, |
| "grad_norm": 0.5406898260116577, |
| "learning_rate": 1.2108564663141541e-06, |
| "loss": 0.6004, |
| "step": 755 |
| }, |
| { |
| "epoch": 2.3873684210526314, |
| "grad_norm": 0.5027428269386292, |
| "learning_rate": 1.1988672841595312e-06, |
| "loss": 0.6057, |
| "step": 756 |
| }, |
| { |
| "epoch": 2.3905263157894736, |
| "grad_norm": 0.5120511054992676, |
| "learning_rate": 1.186929662161221e-06, |
| "loss": 0.6129, |
| "step": 757 |
| }, |
| { |
| "epoch": 2.393684210526316, |
| "grad_norm": 0.5876862406730652, |
| "learning_rate": 1.1750437622461293e-06, |
| "loss": 0.5984, |
| "step": 758 |
| }, |
| { |
| "epoch": 2.3968421052631577, |
| "grad_norm": 0.5318094491958618, |
| "learning_rate": 1.1632097456395802e-06, |
| "loss": 0.631, |
| "step": 759 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.48409977555274963, |
| "learning_rate": 1.1514277728631323e-06, |
| "loss": 0.6322, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.403157894736842, |
| "grad_norm": 0.5341342687606812, |
| "learning_rate": 1.1396980037324e-06, |
| "loss": 0.6, |
| "step": 761 |
| }, |
| { |
| "epoch": 2.406315789473684, |
| "grad_norm": 0.5514808893203735, |
| "learning_rate": 1.128020597354884e-06, |
| "loss": 0.6348, |
| "step": 762 |
| }, |
| { |
| "epoch": 2.409473684210526, |
| "grad_norm": 0.5363562107086182, |
| "learning_rate": 1.1163957121278163e-06, |
| "loss": 0.5762, |
| "step": 763 |
| }, |
| { |
| "epoch": 2.4126315789473685, |
| "grad_norm": 0.5589635968208313, |
| "learning_rate": 1.104823505736009e-06, |
| "loss": 0.6406, |
| "step": 764 |
| }, |
| { |
| "epoch": 2.4157894736842107, |
| "grad_norm": 0.5272628664970398, |
| "learning_rate": 1.093304135149717e-06, |
| "loss": 0.5923, |
| "step": 765 |
| }, |
| { |
| "epoch": 2.4189473684210525, |
| "grad_norm": 0.54401695728302, |
| "learning_rate": 1.0818377566225075e-06, |
| "loss": 0.5936, |
| "step": 766 |
| }, |
| { |
| "epoch": 2.4221052631578948, |
| "grad_norm": 0.5256767272949219, |
| "learning_rate": 1.070424525689142e-06, |
| "loss": 0.6173, |
| "step": 767 |
| }, |
| { |
| "epoch": 2.425263157894737, |
| "grad_norm": 0.5305107831954956, |
| "learning_rate": 1.0590645971634655e-06, |
| "loss": 0.5995, |
| "step": 768 |
| }, |
| { |
| "epoch": 2.428421052631579, |
| "grad_norm": 0.5133773684501648, |
| "learning_rate": 1.0477581251363066e-06, |
| "loss": 0.6294, |
| "step": 769 |
| }, |
| { |
| "epoch": 2.431578947368421, |
| "grad_norm": 0.5675320625305176, |
| "learning_rate": 1.0365052629733884e-06, |
| "loss": 0.5941, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.4347368421052633, |
| "grad_norm": 0.5107877254486084, |
| "learning_rate": 1.025306163313246e-06, |
| "loss": 0.6195, |
| "step": 771 |
| }, |
| { |
| "epoch": 2.437894736842105, |
| "grad_norm": 0.5630270838737488, |
| "learning_rate": 1.0141609780651585e-06, |
| "loss": 0.6353, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.4410526315789474, |
| "grad_norm": 0.4963136911392212, |
| "learning_rate": 1.0030698584070848e-06, |
| "loss": 0.6019, |
| "step": 773 |
| }, |
| { |
| "epoch": 2.4442105263157896, |
| "grad_norm": 0.5430152416229248, |
| "learning_rate": 9.92032954783621e-07, |
| "loss": 0.6518, |
| "step": 774 |
| }, |
| { |
| "epoch": 2.4473684210526314, |
| "grad_norm": 0.5275354981422424, |
| "learning_rate": 9.81050416903951e-07, |
| "loss": 0.6547, |
| "step": 775 |
| }, |
| { |
| "epoch": 2.4505263157894737, |
| "grad_norm": 0.5483253598213196, |
| "learning_rate": 9.701223937398152e-07, |
| "loss": 0.634, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.453684210526316, |
| "grad_norm": 0.5382000803947449, |
| "learning_rate": 9.592490335234993e-07, |
| "loss": 0.648, |
| "step": 777 |
| }, |
| { |
| "epoch": 2.4568421052631577, |
| "grad_norm": 0.5335687398910522, |
| "learning_rate": 9.484304837458158e-07, |
| "loss": 0.5985, |
| "step": 778 |
| }, |
| { |
| "epoch": 2.46, |
| "grad_norm": 0.5186282992362976, |
| "learning_rate": 9.376668911541042e-07, |
| "loss": 0.6092, |
| "step": 779 |
| }, |
| { |
| "epoch": 2.463157894736842, |
| "grad_norm": 0.517254114151001, |
| "learning_rate": 9.269584017502431e-07, |
| "loss": 0.6524, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.466315789473684, |
| "grad_norm": 0.5538238286972046, |
| "learning_rate": 9.163051607886703e-07, |
| "loss": 0.6008, |
| "step": 781 |
| }, |
| { |
| "epoch": 2.4694736842105263, |
| "grad_norm": 0.5268839597702026, |
| "learning_rate": 9.057073127744065e-07, |
| "loss": 0.6115, |
| "step": 782 |
| }, |
| { |
| "epoch": 2.4726315789473685, |
| "grad_norm": 0.5400952696800232, |
| "learning_rate": 8.951650014611019e-07, |
| "loss": 0.5816, |
| "step": 783 |
| }, |
| { |
| "epoch": 2.4757894736842108, |
| "grad_norm": 0.571423351764679, |
| "learning_rate": 8.846783698490835e-07, |
| "loss": 0.6134, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.4789473684210526, |
| "grad_norm": 0.5169550180435181, |
| "learning_rate": 8.742475601834133e-07, |
| "loss": 0.6033, |
| "step": 785 |
| }, |
| { |
| "epoch": 2.482105263157895, |
| "grad_norm": 0.5374566912651062, |
| "learning_rate": 8.638727139519637e-07, |
| "loss": 0.597, |
| "step": 786 |
| }, |
| { |
| "epoch": 2.485263157894737, |
| "grad_norm": 0.5302397012710571, |
| "learning_rate": 8.535539718834929e-07, |
| "loss": 0.6735, |
| "step": 787 |
| }, |
| { |
| "epoch": 2.488421052631579, |
| "grad_norm": 0.5013649463653564, |
| "learning_rate": 8.432914739457432e-07, |
| "loss": 0.6714, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.491578947368421, |
| "grad_norm": 0.5414134860038757, |
| "learning_rate": 8.330853593435345e-07, |
| "loss": 0.6105, |
| "step": 789 |
| }, |
| { |
| "epoch": 2.4947368421052634, |
| "grad_norm": 0.5048105120658875, |
| "learning_rate": 8.229357665168791e-07, |
| "loss": 0.6186, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.497894736842105, |
| "grad_norm": 0.5406049489974976, |
| "learning_rate": 8.12842833139107e-07, |
| "loss": 0.6146, |
| "step": 791 |
| }, |
| { |
| "epoch": 2.5010526315789474, |
| "grad_norm": 0.5164625644683838, |
| "learning_rate": 8.028066961149921e-07, |
| "loss": 0.6194, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.5042105263157897, |
| "grad_norm": 0.5256004929542542, |
| "learning_rate": 7.928274915789035e-07, |
| "loss": 0.6136, |
| "step": 793 |
| }, |
| { |
| "epoch": 2.5073684210526315, |
| "grad_norm": 0.5570123791694641, |
| "learning_rate": 7.829053548929488e-07, |
| "loss": 0.6325, |
| "step": 794 |
| }, |
| { |
| "epoch": 2.5105263157894737, |
| "grad_norm": 0.5029011964797974, |
| "learning_rate": 7.730404206451459e-07, |
| "loss": 0.599, |
| "step": 795 |
| }, |
| { |
| "epoch": 2.513684210526316, |
| "grad_norm": 0.5093344449996948, |
| "learning_rate": 7.632328226475971e-07, |
| "loss": 0.6007, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.5168421052631578, |
| "grad_norm": 0.4984530508518219, |
| "learning_rate": 7.53482693934669e-07, |
| "loss": 0.639, |
| "step": 797 |
| }, |
| { |
| "epoch": 2.52, |
| "grad_norm": 0.5288375616073608, |
| "learning_rate": 7.437901667611908e-07, |
| "loss": 0.6021, |
| "step": 798 |
| }, |
| { |
| "epoch": 2.5231578947368423, |
| "grad_norm": 0.5130994915962219, |
| "learning_rate": 7.341553726006611e-07, |
| "loss": 0.6125, |
| "step": 799 |
| }, |
| { |
| "epoch": 2.526315789473684, |
| "grad_norm": 0.5277830958366394, |
| "learning_rate": 7.245784421434643e-07, |
| "loss": 0.6388, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.5294736842105263, |
| "grad_norm": 0.5155095458030701, |
| "learning_rate": 7.150595052950954e-07, |
| "loss": 0.6319, |
| "step": 801 |
| }, |
| { |
| "epoch": 2.5326315789473686, |
| "grad_norm": 0.5809716582298279, |
| "learning_rate": 7.055986911744017e-07, |
| "loss": 0.578, |
| "step": 802 |
| }, |
| { |
| "epoch": 2.5357894736842104, |
| "grad_norm": 0.5170502066612244, |
| "learning_rate": 6.961961281118285e-07, |
| "loss": 0.6074, |
| "step": 803 |
| }, |
| { |
| "epoch": 2.5389473684210526, |
| "grad_norm": 0.5173423290252686, |
| "learning_rate": 6.868519436476795e-07, |
| "loss": 0.631, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.542105263157895, |
| "grad_norm": 0.49472615122795105, |
| "learning_rate": 6.775662645303871e-07, |
| "loss": 0.6439, |
| "step": 805 |
| }, |
| { |
| "epoch": 2.5452631578947367, |
| "grad_norm": 0.5428845882415771, |
| "learning_rate": 6.683392167147917e-07, |
| "loss": 0.6203, |
| "step": 806 |
| }, |
| { |
| "epoch": 2.548421052631579, |
| "grad_norm": 0.5209921002388, |
| "learning_rate": 6.591709253604356e-07, |
| "loss": 0.6588, |
| "step": 807 |
| }, |
| { |
| "epoch": 2.551578947368421, |
| "grad_norm": 0.5301423668861389, |
| "learning_rate": 6.500615148298617e-07, |
| "loss": 0.6003, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.554736842105263, |
| "grad_norm": 0.5289193391799927, |
| "learning_rate": 6.410111086869314e-07, |
| "loss": 0.6389, |
| "step": 809 |
| }, |
| { |
| "epoch": 2.557894736842105, |
| "grad_norm": 0.5304815769195557, |
| "learning_rate": 6.320198296951435e-07, |
| "loss": 0.5732, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.5610526315789475, |
| "grad_norm": 0.5228027701377869, |
| "learning_rate": 6.230877998159724e-07, |
| "loss": 0.6278, |
| "step": 811 |
| }, |
| { |
| "epoch": 2.5642105263157893, |
| "grad_norm": 0.5397827625274658, |
| "learning_rate": 6.142151402072133e-07, |
| "loss": 0.5979, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.5673684210526315, |
| "grad_norm": 0.5186311602592468, |
| "learning_rate": 6.054019712213377e-07, |
| "loss": 0.5872, |
| "step": 813 |
| }, |
| { |
| "epoch": 2.5705263157894738, |
| "grad_norm": 0.5260378122329712, |
| "learning_rate": 5.966484124038602e-07, |
| "loss": 0.6337, |
| "step": 814 |
| }, |
| { |
| "epoch": 2.5736842105263156, |
| "grad_norm": 0.556025505065918, |
| "learning_rate": 5.879545824917199e-07, |
| "loss": 0.5965, |
| "step": 815 |
| }, |
| { |
| "epoch": 2.576842105263158, |
| "grad_norm": 0.5116238594055176, |
| "learning_rate": 5.793205994116674e-07, |
| "loss": 0.6147, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.58, |
| "grad_norm": 0.5269439816474915, |
| "learning_rate": 5.707465802786655e-07, |
| "loss": 0.5967, |
| "step": 817 |
| }, |
| { |
| "epoch": 2.583157894736842, |
| "grad_norm": 0.5351024270057678, |
| "learning_rate": 5.622326413942997e-07, |
| "loss": 0.6241, |
| "step": 818 |
| }, |
| { |
| "epoch": 2.586315789473684, |
| "grad_norm": 0.536216676235199, |
| "learning_rate": 5.537788982452052e-07, |
| "loss": 0.631, |
| "step": 819 |
| }, |
| { |
| "epoch": 2.5894736842105264, |
| "grad_norm": 0.5194189548492432, |
| "learning_rate": 5.453854655014956e-07, |
| "loss": 0.6394, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.592631578947368, |
| "grad_norm": 0.5215575098991394, |
| "learning_rate": 5.370524570152059e-07, |
| "loss": 0.5948, |
| "step": 821 |
| }, |
| { |
| "epoch": 2.5957894736842104, |
| "grad_norm": 0.5416327118873596, |
| "learning_rate": 5.287799858187548e-07, |
| "loss": 0.6352, |
| "step": 822 |
| }, |
| { |
| "epoch": 2.5989473684210527, |
| "grad_norm": 0.5264502167701721, |
| "learning_rate": 5.205681641234062e-07, |
| "loss": 0.61, |
| "step": 823 |
| }, |
| { |
| "epoch": 2.6021052631578945, |
| "grad_norm": 0.5675118565559387, |
| "learning_rate": 5.1241710331775e-07, |
| "loss": 0.5791, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.6052631578947367, |
| "grad_norm": 0.49685797095298767, |
| "learning_rate": 5.043269139661872e-07, |
| "loss": 0.6316, |
| "step": 825 |
| }, |
| { |
| "epoch": 2.608421052631579, |
| "grad_norm": 0.5199131965637207, |
| "learning_rate": 4.962977058074381e-07, |
| "loss": 0.6217, |
| "step": 826 |
| }, |
| { |
| "epoch": 2.6115789473684208, |
| "grad_norm": 0.5157626271247864, |
| "learning_rate": 4.883295877530431e-07, |
| "loss": 0.6394, |
| "step": 827 |
| }, |
| { |
| "epoch": 2.614736842105263, |
| "grad_norm": 0.5056947469711304, |
| "learning_rate": 4.804226678858936e-07, |
| "loss": 0.6168, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.6178947368421053, |
| "grad_norm": 0.5152086019515991, |
| "learning_rate": 4.725770534587637e-07, |
| "loss": 0.5963, |
| "step": 829 |
| }, |
| { |
| "epoch": 2.6210526315789475, |
| "grad_norm": 0.4946594536304474, |
| "learning_rate": 4.647928508928512e-07, |
| "loss": 0.6344, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.6242105263157893, |
| "grad_norm": 0.5005388855934143, |
| "learning_rate": 4.5707016577634156e-07, |
| "loss": 0.6797, |
| "step": 831 |
| }, |
| { |
| "epoch": 2.6273684210526316, |
| "grad_norm": 0.49225690960884094, |
| "learning_rate": 4.494091028629699e-07, |
| "loss": 0.6552, |
| "step": 832 |
| }, |
| { |
| "epoch": 2.630526315789474, |
| "grad_norm": 0.5395132303237915, |
| "learning_rate": 4.418097660706039e-07, |
| "loss": 0.6197, |
| "step": 833 |
| }, |
| { |
| "epoch": 2.6336842105263156, |
| "grad_norm": 0.5258274674415588, |
| "learning_rate": 4.342722584798298e-07, |
| "loss": 0.6151, |
| "step": 834 |
| }, |
| { |
| "epoch": 2.636842105263158, |
| "grad_norm": 0.507228672504425, |
| "learning_rate": 4.267966823325581e-07, |
| "loss": 0.6213, |
| "step": 835 |
| }, |
| { |
| "epoch": 2.64, |
| "grad_norm": 0.5385991334915161, |
| "learning_rate": 4.193831390306352e-07, |
| "loss": 0.6423, |
| "step": 836 |
| }, |
| { |
| "epoch": 2.6431578947368424, |
| "grad_norm": 0.5073803067207336, |
| "learning_rate": 4.1203172913446774e-07, |
| "loss": 0.5812, |
| "step": 837 |
| }, |
| { |
| "epoch": 2.646315789473684, |
| "grad_norm": 0.5264935493469238, |
| "learning_rate": 4.047425523616577e-07, |
| "loss": 0.6305, |
| "step": 838 |
| }, |
| { |
| "epoch": 2.6494736842105264, |
| "grad_norm": 0.4937891364097595, |
| "learning_rate": 3.9751570758565284e-07, |
| "loss": 0.6197, |
| "step": 839 |
| }, |
| { |
| "epoch": 2.6526315789473687, |
| "grad_norm": 0.5468772053718567, |
| "learning_rate": 3.9035129283440165e-07, |
| "loss": 0.592, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.6557894736842105, |
| "grad_norm": 0.5518105626106262, |
| "learning_rate": 3.8324940528902845e-07, |
| "loss": 0.6332, |
| "step": 841 |
| }, |
| { |
| "epoch": 2.6589473684210527, |
| "grad_norm": 0.5344856381416321, |
| "learning_rate": 3.762101412825098e-07, |
| "loss": 0.6221, |
| "step": 842 |
| }, |
| { |
| "epoch": 2.662105263157895, |
| "grad_norm": 0.527391791343689, |
| "learning_rate": 3.6923359629837117e-07, |
| "loss": 0.5941, |
| "step": 843 |
| }, |
| { |
| "epoch": 2.665263157894737, |
| "grad_norm": 0.5052513480186462, |
| "learning_rate": 3.6231986496939153e-07, |
| "loss": 0.6352, |
| "step": 844 |
| }, |
| { |
| "epoch": 2.668421052631579, |
| "grad_norm": 0.5419393181800842, |
| "learning_rate": 3.554690410763173e-07, |
| "loss": 0.6058, |
| "step": 845 |
| }, |
| { |
| "epoch": 2.6715789473684213, |
| "grad_norm": 0.5119031667709351, |
| "learning_rate": 3.4868121754659533e-07, |
| "loss": 0.5942, |
| "step": 846 |
| }, |
| { |
| "epoch": 2.674736842105263, |
| "grad_norm": 0.4917730689048767, |
| "learning_rate": 3.4195648645310443e-07, |
| "loss": 0.6145, |
| "step": 847 |
| }, |
| { |
| "epoch": 2.6778947368421053, |
| "grad_norm": 0.5071961283683777, |
| "learning_rate": 3.3529493901291567e-07, |
| "loss": 0.6481, |
| "step": 848 |
| }, |
| { |
| "epoch": 2.6810526315789476, |
| "grad_norm": 0.5443476438522339, |
| "learning_rate": 3.286966655860485e-07, |
| "loss": 0.5775, |
| "step": 849 |
| }, |
| { |
| "epoch": 2.6842105263157894, |
| "grad_norm": 0.5274863839149475, |
| "learning_rate": 3.2216175567424737e-07, |
| "loss": 0.5835, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.6873684210526316, |
| "grad_norm": 0.5420557856559753, |
| "learning_rate": 3.156902979197679e-07, |
| "loss": 0.5913, |
| "step": 851 |
| }, |
| { |
| "epoch": 2.690526315789474, |
| "grad_norm": 0.5218223929405212, |
| "learning_rate": 3.0928238010417275e-07, |
| "loss": 0.6144, |
| "step": 852 |
| }, |
| { |
| "epoch": 2.6936842105263157, |
| "grad_norm": 0.5233944654464722, |
| "learning_rate": 3.029380891471445e-07, |
| "loss": 0.632, |
| "step": 853 |
| }, |
| { |
| "epoch": 2.696842105263158, |
| "grad_norm": 0.5044023394584656, |
| "learning_rate": 2.966575111053027e-07, |
| "loss": 0.5874, |
| "step": 854 |
| }, |
| { |
| "epoch": 2.7, |
| "grad_norm": 0.5117988586425781, |
| "learning_rate": 2.9044073117103777e-07, |
| "loss": 0.6533, |
| "step": 855 |
| }, |
| { |
| "epoch": 2.703157894736842, |
| "grad_norm": 0.511157751083374, |
| "learning_rate": 2.842878336713578e-07, |
| "loss": 0.6282, |
| "step": 856 |
| }, |
| { |
| "epoch": 2.7063157894736842, |
| "grad_norm": 0.5114948749542236, |
| "learning_rate": 2.7819890206674083e-07, |
| "loss": 0.6355, |
| "step": 857 |
| }, |
| { |
| "epoch": 2.7094736842105265, |
| "grad_norm": 0.549742579460144, |
| "learning_rate": 2.7217401895000664e-07, |
| "loss": 0.6086, |
| "step": 858 |
| }, |
| { |
| "epoch": 2.7126315789473683, |
| "grad_norm": 0.5112407803535461, |
| "learning_rate": 2.6621326604519216e-07, |
| "loss": 0.6062, |
| "step": 859 |
| }, |
| { |
| "epoch": 2.7157894736842105, |
| "grad_norm": 0.5380560755729675, |
| "learning_rate": 2.6031672420644694e-07, |
| "loss": 0.5904, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.718947368421053, |
| "grad_norm": 0.49735158681869507, |
| "learning_rate": 2.5448447341693493e-07, |
| "loss": 0.593, |
| "step": 861 |
| }, |
| { |
| "epoch": 2.7221052631578946, |
| "grad_norm": 0.4785706698894501, |
| "learning_rate": 2.4871659278774884e-07, |
| "loss": 0.6211, |
| "step": 862 |
| }, |
| { |
| "epoch": 2.725263157894737, |
| "grad_norm": 0.5243927240371704, |
| "learning_rate": 2.430131605568353e-07, |
| "loss": 0.6143, |
| "step": 863 |
| }, |
| { |
| "epoch": 2.728421052631579, |
| "grad_norm": 0.5355093479156494, |
| "learning_rate": 2.3737425408794202e-07, |
| "loss": 0.6162, |
| "step": 864 |
| }, |
| { |
| "epoch": 2.731578947368421, |
| "grad_norm": 0.5540766716003418, |
| "learning_rate": 2.31799949869555e-07, |
| "loss": 0.5777, |
| "step": 865 |
| }, |
| { |
| "epoch": 2.734736842105263, |
| "grad_norm": 0.5112879276275635, |
| "learning_rate": 2.2629032351387247e-07, |
| "loss": 0.5949, |
| "step": 866 |
| }, |
| { |
| "epoch": 2.7378947368421054, |
| "grad_norm": 0.5168077945709229, |
| "learning_rate": 2.2084544975577383e-07, |
| "loss": 0.6391, |
| "step": 867 |
| }, |
| { |
| "epoch": 2.741052631578947, |
| "grad_norm": 0.5143442749977112, |
| "learning_rate": 2.1546540245180825e-07, |
| "loss": 0.6142, |
| "step": 868 |
| }, |
| { |
| "epoch": 2.7442105263157894, |
| "grad_norm": 0.49836885929107666, |
| "learning_rate": 2.1015025457919002e-07, |
| "loss": 0.5926, |
| "step": 869 |
| }, |
| { |
| "epoch": 2.7473684210526317, |
| "grad_norm": 0.5337677597999573, |
| "learning_rate": 2.0490007823481096e-07, |
| "loss": 0.5758, |
| "step": 870 |
| }, |
| { |
| "epoch": 2.7505263157894735, |
| "grad_norm": 0.5162203311920166, |
| "learning_rate": 1.9971494463426332e-07, |
| "loss": 0.6044, |
| "step": 871 |
| }, |
| { |
| "epoch": 2.7536842105263157, |
| "grad_norm": 0.5083566308021545, |
| "learning_rate": 1.9459492411087078e-07, |
| "loss": 0.6377, |
| "step": 872 |
| }, |
| { |
| "epoch": 2.756842105263158, |
| "grad_norm": 0.5102013349533081, |
| "learning_rate": 1.8954008611473618e-07, |
| "loss": 0.6287, |
| "step": 873 |
| }, |
| { |
| "epoch": 2.76, |
| "grad_norm": 0.5197393894195557, |
| "learning_rate": 1.8455049921179858e-07, |
| "loss": 0.6224, |
| "step": 874 |
| }, |
| { |
| "epoch": 2.763157894736842, |
| "grad_norm": 0.4867171347141266, |
| "learning_rate": 1.7962623108290556e-07, |
| "loss": 0.6475, |
| "step": 875 |
| }, |
| { |
| "epoch": 2.7663157894736843, |
| "grad_norm": 0.5163273215293884, |
| "learning_rate": 1.7476734852289235e-07, |
| "loss": 0.6408, |
| "step": 876 |
| }, |
| { |
| "epoch": 2.769473684210526, |
| "grad_norm": 0.5355550646781921, |
| "learning_rate": 1.6997391743967696e-07, |
| "loss": 0.6129, |
| "step": 877 |
| }, |
| { |
| "epoch": 2.7726315789473683, |
| "grad_norm": 0.5214855670928955, |
| "learning_rate": 1.65246002853367e-07, |
| "loss": 0.5987, |
| "step": 878 |
| }, |
| { |
| "epoch": 2.7757894736842106, |
| "grad_norm": 0.5373660922050476, |
| "learning_rate": 1.6058366889537546e-07, |
| "loss": 0.5472, |
| "step": 879 |
| }, |
| { |
| "epoch": 2.7789473684210524, |
| "grad_norm": 0.49646681547164917, |
| "learning_rate": 1.559869788075541e-07, |
| "loss": 0.612, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.7821052631578946, |
| "grad_norm": 0.5255357623100281, |
| "learning_rate": 1.514559949413319e-07, |
| "loss": 0.6083, |
| "step": 881 |
| }, |
| { |
| "epoch": 2.785263157894737, |
| "grad_norm": 0.5195873379707336, |
| "learning_rate": 1.4699077875687252e-07, |
| "loss": 0.6032, |
| "step": 882 |
| }, |
| { |
| "epoch": 2.7884210526315787, |
| "grad_norm": 0.5284191370010376, |
| "learning_rate": 1.4259139082223761e-07, |
| "loss": 0.6466, |
| "step": 883 |
| }, |
| { |
| "epoch": 2.791578947368421, |
| "grad_norm": 0.5327157378196716, |
| "learning_rate": 1.3825789081256812e-07, |
| "loss": 0.5883, |
| "step": 884 |
| }, |
| { |
| "epoch": 2.794736842105263, |
| "grad_norm": 0.5047375559806824, |
| "learning_rate": 1.3399033750927327e-07, |
| "loss": 0.6168, |
| "step": 885 |
| }, |
| { |
| "epoch": 2.797894736842105, |
| "grad_norm": 0.5161851644515991, |
| "learning_rate": 1.297887887992344e-07, |
| "loss": 0.615, |
| "step": 886 |
| }, |
| { |
| "epoch": 2.8010526315789472, |
| "grad_norm": 0.5051410794258118, |
| "learning_rate": 1.2565330167401747e-07, |
| "loss": 0.6444, |
| "step": 887 |
| }, |
| { |
| "epoch": 2.8042105263157895, |
| "grad_norm": 0.5022399425506592, |
| "learning_rate": 1.2158393222910235e-07, |
| "loss": 0.6318, |
| "step": 888 |
| }, |
| { |
| "epoch": 2.8073684210526317, |
| "grad_norm": 0.4974055588245392, |
| "learning_rate": 1.175807356631209e-07, |
| "loss": 0.6291, |
| "step": 889 |
| }, |
| { |
| "epoch": 2.8105263157894735, |
| "grad_norm": 0.4854937493801117, |
| "learning_rate": 1.1364376627710727e-07, |
| "loss": 0.6456, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.813684210526316, |
| "grad_norm": 0.5069525837898254, |
| "learning_rate": 1.0977307747376431e-07, |
| "loss": 0.6226, |
| "step": 891 |
| }, |
| { |
| "epoch": 2.816842105263158, |
| "grad_norm": 0.5431550145149231, |
| "learning_rate": 1.0596872175673456e-07, |
| "loss": 0.6005, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.82, |
| "grad_norm": 0.5111134052276611, |
| "learning_rate": 1.0223075072989418e-07, |
| "loss": 0.6243, |
| "step": 893 |
| }, |
| { |
| "epoch": 2.823157894736842, |
| "grad_norm": 0.5054842829704285, |
| "learning_rate": 9.855921509664745e-08, |
| "loss": 0.6145, |
| "step": 894 |
| }, |
| { |
| "epoch": 2.8263157894736843, |
| "grad_norm": 0.5049132108688354, |
| "learning_rate": 9.495416465924113e-08, |
| "loss": 0.6283, |
| "step": 895 |
| }, |
| { |
| "epoch": 2.8294736842105266, |
| "grad_norm": 0.5091620087623596, |
| "learning_rate": 9.141564831808947e-08, |
| "loss": 0.6245, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.8326315789473684, |
| "grad_norm": 0.5132347941398621, |
| "learning_rate": 8.794371407111091e-08, |
| "loss": 0.6149, |
| "step": 897 |
| }, |
| { |
| "epoch": 2.8357894736842106, |
| "grad_norm": 0.5056635737419128, |
| "learning_rate": 8.45384090130752e-08, |
| "loss": 0.6408, |
| "step": 898 |
| }, |
| { |
| "epoch": 2.838947368421053, |
| "grad_norm": 0.4924507141113281, |
| "learning_rate": 8.11997793349667e-08, |
| "loss": 0.622, |
| "step": 899 |
| }, |
| { |
| "epoch": 2.8421052631578947, |
| "grad_norm": 0.4771850109100342, |
| "learning_rate": 7.792787032335657e-08, |
| "loss": 0.6321, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.845263157894737, |
| "grad_norm": 0.5036438703536987, |
| "learning_rate": 7.472272635978995e-08, |
| "loss": 0.6403, |
| "step": 901 |
| }, |
| { |
| "epoch": 2.848421052631579, |
| "grad_norm": 0.5470741987228394, |
| "learning_rate": 7.158439092018077e-08, |
| "loss": 0.6147, |
| "step": 902 |
| }, |
| { |
| "epoch": 2.851578947368421, |
| "grad_norm": 0.4736262261867523, |
| "learning_rate": 6.851290657422627e-08, |
| "loss": 0.6591, |
| "step": 903 |
| }, |
| { |
| "epoch": 2.8547368421052632, |
| "grad_norm": 0.4981045424938202, |
| "learning_rate": 6.550831498482679e-08, |
| "loss": 0.5957, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.8578947368421055, |
| "grad_norm": 0.5055891275405884, |
| "learning_rate": 6.257065690752129e-08, |
| "loss": 0.6222, |
| "step": 905 |
| }, |
| { |
| "epoch": 2.8610526315789473, |
| "grad_norm": 0.5165517330169678, |
| "learning_rate": 5.969997218993328e-08, |
| "loss": 0.6335, |
| "step": 906 |
| }, |
| { |
| "epoch": 2.8642105263157895, |
| "grad_norm": 0.5130405426025391, |
| "learning_rate": 5.689629977123412e-08, |
| "loss": 0.602, |
| "step": 907 |
| }, |
| { |
| "epoch": 2.867368421052632, |
| "grad_norm": 0.5092564821243286, |
| "learning_rate": 5.415967768160946e-08, |
| "loss": 0.6221, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.8705263157894736, |
| "grad_norm": 0.5033932328224182, |
| "learning_rate": 5.149014304174915e-08, |
| "loss": 0.632, |
| "step": 909 |
| }, |
| { |
| "epoch": 2.873684210526316, |
| "grad_norm": 0.5016679167747498, |
| "learning_rate": 4.8887732062337656e-08, |
| "loss": 0.61, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.876842105263158, |
| "grad_norm": 0.5133535265922546, |
| "learning_rate": 4.635248004356885e-08, |
| "loss": 0.6702, |
| "step": 911 |
| }, |
| { |
| "epoch": 2.88, |
| "grad_norm": 0.496550977230072, |
| "learning_rate": 4.388442137466198e-08, |
| "loss": 0.6243, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.883157894736842, |
| "grad_norm": 0.48129701614379883, |
| "learning_rate": 4.148358953339926e-08, |
| "loss": 0.6565, |
| "step": 913 |
| }, |
| { |
| "epoch": 2.8863157894736844, |
| "grad_norm": 0.5186068415641785, |
| "learning_rate": 3.9150017085669566e-08, |
| "loss": 0.63, |
| "step": 914 |
| }, |
| { |
| "epoch": 2.889473684210526, |
| "grad_norm": 0.5132560133934021, |
| "learning_rate": 3.688373568502601e-08, |
| "loss": 0.6137, |
| "step": 915 |
| }, |
| { |
| "epoch": 2.8926315789473684, |
| "grad_norm": 0.5437251329421997, |
| "learning_rate": 3.468477607226017e-08, |
| "loss": 0.6646, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.8957894736842107, |
| "grad_norm": 0.5179529786109924, |
| "learning_rate": 3.255316807498077e-08, |
| "loss": 0.5848, |
| "step": 917 |
| }, |
| { |
| "epoch": 2.8989473684210525, |
| "grad_norm": 0.5003638863563538, |
| "learning_rate": 3.048894060721064e-08, |
| "loss": 0.6227, |
| "step": 918 |
| }, |
| { |
| "epoch": 2.9021052631578947, |
| "grad_norm": 0.5409918427467346, |
| "learning_rate": 2.8492121668997064e-08, |
| "loss": 0.6075, |
| "step": 919 |
| }, |
| { |
| "epoch": 2.905263157894737, |
| "grad_norm": 0.4994703531265259, |
| "learning_rate": 2.6562738346027627e-08, |
| "loss": 0.6214, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.908421052631579, |
| "grad_norm": 0.5022985935211182, |
| "learning_rate": 2.4700816809266615e-08, |
| "loss": 0.6123, |
| "step": 921 |
| }, |
| { |
| "epoch": 2.911578947368421, |
| "grad_norm": 0.5189907550811768, |
| "learning_rate": 2.290638231459641e-08, |
| "loss": 0.5687, |
| "step": 922 |
| }, |
| { |
| "epoch": 2.9147368421052633, |
| "grad_norm": 0.4975310266017914, |
| "learning_rate": 2.1179459202479436e-08, |
| "loss": 0.6152, |
| "step": 923 |
| }, |
| { |
| "epoch": 2.917894736842105, |
| "grad_norm": 0.5323106050491333, |
| "learning_rate": 1.9520070897623976e-08, |
| "loss": 0.6638, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.9210526315789473, |
| "grad_norm": 0.48443955183029175, |
| "learning_rate": 1.792823990866721e-08, |
| "loss": 0.6238, |
| "step": 925 |
| }, |
| { |
| "epoch": 2.9242105263157896, |
| "grad_norm": 0.48831799626350403, |
| "learning_rate": 1.640398782787267e-08, |
| "loss": 0.5758, |
| "step": 926 |
| }, |
| { |
| "epoch": 2.9273684210526314, |
| "grad_norm": 0.49946728348731995, |
| "learning_rate": 1.49473353308327e-08, |
| "loss": 0.6192, |
| "step": 927 |
| }, |
| { |
| "epoch": 2.9305263157894736, |
| "grad_norm": 0.5276715159416199, |
| "learning_rate": 1.3558302176192584e-08, |
| "loss": 0.6171, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.933684210526316, |
| "grad_norm": 0.5295907855033875, |
| "learning_rate": 1.2236907205379623e-08, |
| "loss": 0.6198, |
| "step": 929 |
| }, |
| { |
| "epoch": 2.9368421052631577, |
| "grad_norm": 0.4946899712085724, |
| "learning_rate": 1.0983168342348915e-08, |
| "loss": 0.6253, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.94, |
| "grad_norm": 0.5253959894180298, |
| "learning_rate": 9.797102593339659e-09, |
| "loss": 0.5993, |
| "step": 931 |
| }, |
| { |
| "epoch": 2.943157894736842, |
| "grad_norm": 0.5255020260810852, |
| "learning_rate": 8.678726046644215e-09, |
| "loss": 0.6384, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.946315789473684, |
| "grad_norm": 0.5470498204231262, |
| "learning_rate": 7.628053872390517e-09, |
| "loss": 0.6002, |
| "step": 933 |
| }, |
| { |
| "epoch": 2.9494736842105262, |
| "grad_norm": 0.5109714865684509, |
| "learning_rate": 6.645100322336118e-09, |
| "loss": 0.6359, |
| "step": 934 |
| }, |
| { |
| "epoch": 2.9526315789473685, |
| "grad_norm": 0.517336368560791, |
| "learning_rate": 5.7298787296750094e-09, |
| "loss": 0.6493, |
| "step": 935 |
| }, |
| { |
| "epoch": 2.9557894736842103, |
| "grad_norm": 0.5175721049308777, |
| "learning_rate": 4.88240150885555e-09, |
| "loss": 0.6275, |
| "step": 936 |
| }, |
| { |
| "epoch": 2.9589473684210525, |
| "grad_norm": 0.52158522605896, |
| "learning_rate": 4.1026801554139296e-09, |
| "loss": 0.613, |
| "step": 937 |
| }, |
| { |
| "epoch": 2.962105263157895, |
| "grad_norm": 0.5218417048454285, |
| "learning_rate": 3.3907252458176277e-09, |
| "loss": 0.6094, |
| "step": 938 |
| }, |
| { |
| "epoch": 2.9652631578947366, |
| "grad_norm": 0.5241231322288513, |
| "learning_rate": 2.7465464373205296e-09, |
| "loss": 0.6365, |
| "step": 939 |
| }, |
| { |
| "epoch": 2.968421052631579, |
| "grad_norm": 0.5028926730155945, |
| "learning_rate": 2.1701524678346964e-09, |
| "loss": 0.5889, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.971578947368421, |
| "grad_norm": 0.5517033934593201, |
| "learning_rate": 1.6615511558082386e-09, |
| "loss": 0.559, |
| "step": 941 |
| }, |
| { |
| "epoch": 2.974736842105263, |
| "grad_norm": 0.49616527557373047, |
| "learning_rate": 1.220749400123733e-09, |
| "loss": 0.6422, |
| "step": 942 |
| }, |
| { |
| "epoch": 2.977894736842105, |
| "grad_norm": 0.5219214558601379, |
| "learning_rate": 8.477531799999661e-10, |
| "loss": 0.6043, |
| "step": 943 |
| }, |
| { |
| "epoch": 2.9810526315789474, |
| "grad_norm": 0.5003503561019897, |
| "learning_rate": 5.425675549136645e-10, |
| "loss": 0.6027, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.984210526315789, |
| "grad_norm": 0.5330973863601685, |
| "learning_rate": 3.051966645312154e-10, |
| "loss": 0.5806, |
| "step": 945 |
| }, |
| { |
| "epoch": 2.9873684210526315, |
| "grad_norm": 0.5317094326019287, |
| "learning_rate": 1.3564372865038088e-10, |
| "loss": 0.607, |
| "step": 946 |
| }, |
| { |
| "epoch": 2.9905263157894737, |
| "grad_norm": 0.5060402750968933, |
| "learning_rate": 3.3911047158663445e-11, |
| "loss": 0.6126, |
| "step": 947 |
| }, |
| { |
| "epoch": 2.993684210526316, |
| "grad_norm": 0.5056828260421753, |
| "learning_rate": 0.0, |
| "loss": 0.6224, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.993684210526316, |
| "step": 948, |
| "total_flos": 8.371731904995656e+17, |
| "train_loss": 0.7066250008998541, |
| "train_runtime": 24790.5767, |
| "train_samples_per_second": 3.677, |
| "train_steps_per_second": 0.038 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 948, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 8.371731904995656e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |