| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.0, | |
| "eval_steps": 500, | |
| "global_step": 388, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.005154639175257732, | |
| "grad_norm": 16.203728734181727, | |
| "learning_rate": 0.0, | |
| "loss": 0.782, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.010309278350515464, | |
| "grad_norm": 17.966080542459263, | |
| "learning_rate": 1.6949152542372883e-07, | |
| "loss": 0.8806, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.015463917525773196, | |
| "grad_norm": 19.39755361913842, | |
| "learning_rate": 3.3898305084745766e-07, | |
| "loss": 0.9713, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.020618556701030927, | |
| "grad_norm": 18.521681485723096, | |
| "learning_rate": 5.084745762711865e-07, | |
| "loss": 0.7594, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.02577319587628866, | |
| "grad_norm": 16.960097281692487, | |
| "learning_rate": 6.779661016949153e-07, | |
| "loss": 1.0095, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.030927835051546393, | |
| "grad_norm": 17.9303091015193, | |
| "learning_rate": 8.474576271186441e-07, | |
| "loss": 1.0024, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.03608247422680412, | |
| "grad_norm": 16.393493317929863, | |
| "learning_rate": 1.016949152542373e-06, | |
| "loss": 0.7936, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.041237113402061855, | |
| "grad_norm": 16.350575208232332, | |
| "learning_rate": 1.186440677966102e-06, | |
| "loss": 0.6727, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.04639175257731959, | |
| "grad_norm": 13.392733244736176, | |
| "learning_rate": 1.3559322033898307e-06, | |
| "loss": 0.6366, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.05154639175257732, | |
| "grad_norm": 13.99345418325563, | |
| "learning_rate": 1.5254237288135596e-06, | |
| "loss": 0.8352, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.05670103092783505, | |
| "grad_norm": 16.574632940102198, | |
| "learning_rate": 1.6949152542372882e-06, | |
| "loss": 0.8211, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.061855670103092786, | |
| "grad_norm": 7.432819145467484, | |
| "learning_rate": 1.8644067796610171e-06, | |
| "loss": 0.4671, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.06701030927835051, | |
| "grad_norm": 7.368792116734323, | |
| "learning_rate": 2.033898305084746e-06, | |
| "loss": 0.4617, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.07216494845360824, | |
| "grad_norm": 5.189955956808789, | |
| "learning_rate": 2.203389830508475e-06, | |
| "loss": 0.4118, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.07731958762886598, | |
| "grad_norm": 6.110418564319445, | |
| "learning_rate": 2.372881355932204e-06, | |
| "loss": 0.4772, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.08247422680412371, | |
| "grad_norm": 2.524741574511695, | |
| "learning_rate": 2.5423728813559323e-06, | |
| "loss": 0.1693, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.08762886597938144, | |
| "grad_norm": 4.093704704249702, | |
| "learning_rate": 2.7118644067796613e-06, | |
| "loss": 0.5946, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.09278350515463918, | |
| "grad_norm": 2.792475441128467, | |
| "learning_rate": 2.8813559322033903e-06, | |
| "loss": 0.3907, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.0979381443298969, | |
| "grad_norm": 2.8142697054484422, | |
| "learning_rate": 3.0508474576271192e-06, | |
| "loss": 0.4128, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.10309278350515463, | |
| "grad_norm": 3.0557753995059835, | |
| "learning_rate": 3.2203389830508473e-06, | |
| "loss": 0.4128, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.10824742268041238, | |
| "grad_norm": 2.407462983991879, | |
| "learning_rate": 3.3898305084745763e-06, | |
| "loss": 0.2454, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.1134020618556701, | |
| "grad_norm": 4.175164045570657, | |
| "learning_rate": 3.5593220338983053e-06, | |
| "loss": 0.5676, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.11855670103092783, | |
| "grad_norm": 3.2745984458679542, | |
| "learning_rate": 3.7288135593220342e-06, | |
| "loss": 0.3725, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.12371134020618557, | |
| "grad_norm": 2.630482014005189, | |
| "learning_rate": 3.898305084745763e-06, | |
| "loss": 0.284, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.12886597938144329, | |
| "grad_norm": 2.5818523655885657, | |
| "learning_rate": 4.067796610169492e-06, | |
| "loss": 0.3914, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.13402061855670103, | |
| "grad_norm": 2.1862545024847733, | |
| "learning_rate": 4.23728813559322e-06, | |
| "loss": 0.3245, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.13917525773195877, | |
| "grad_norm": 1.8029553321125102, | |
| "learning_rate": 4.40677966101695e-06, | |
| "loss": 0.1906, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.14432989690721648, | |
| "grad_norm": 2.1274587742776228, | |
| "learning_rate": 4.576271186440678e-06, | |
| "loss": 0.3129, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.14948453608247422, | |
| "grad_norm": 2.0553069128433985, | |
| "learning_rate": 4.745762711864408e-06, | |
| "loss": 0.362, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.15463917525773196, | |
| "grad_norm": 2.2636243270640812, | |
| "learning_rate": 4.915254237288136e-06, | |
| "loss": 0.3487, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.15979381443298968, | |
| "grad_norm": 2.0627428648390023, | |
| "learning_rate": 5.084745762711865e-06, | |
| "loss": 0.3017, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.16494845360824742, | |
| "grad_norm": 1.707507687529141, | |
| "learning_rate": 5.254237288135594e-06, | |
| "loss": 0.2379, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.17010309278350516, | |
| "grad_norm": 1.6433474151177994, | |
| "learning_rate": 5.423728813559323e-06, | |
| "loss": 0.2785, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.17525773195876287, | |
| "grad_norm": 1.8049400111968015, | |
| "learning_rate": 5.593220338983051e-06, | |
| "loss": 0.2046, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.18041237113402062, | |
| "grad_norm": 1.956624509674235, | |
| "learning_rate": 5.7627118644067805e-06, | |
| "loss": 0.2924, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.18556701030927836, | |
| "grad_norm": 2.043442546886233, | |
| "learning_rate": 5.932203389830509e-06, | |
| "loss": 0.2517, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.19072164948453607, | |
| "grad_norm": 1.7809412898084382, | |
| "learning_rate": 6.1016949152542385e-06, | |
| "loss": 0.214, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.1958762886597938, | |
| "grad_norm": 1.0871959933984234, | |
| "learning_rate": 6.271186440677966e-06, | |
| "loss": 0.0794, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.20103092783505155, | |
| "grad_norm": 1.6627701820242757, | |
| "learning_rate": 6.440677966101695e-06, | |
| "loss": 0.1529, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.20618556701030927, | |
| "grad_norm": 2.233281327693517, | |
| "learning_rate": 6.610169491525424e-06, | |
| "loss": 0.3918, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.211340206185567, | |
| "grad_norm": 2.4810527246233143, | |
| "learning_rate": 6.779661016949153e-06, | |
| "loss": 0.3276, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.21649484536082475, | |
| "grad_norm": 2.138313718866784, | |
| "learning_rate": 6.949152542372882e-06, | |
| "loss": 0.1874, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.22164948453608246, | |
| "grad_norm": 2.3207138440634445, | |
| "learning_rate": 7.1186440677966106e-06, | |
| "loss": 0.143, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.2268041237113402, | |
| "grad_norm": 1.806845922939825, | |
| "learning_rate": 7.288135593220339e-06, | |
| "loss": 0.2639, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.23195876288659795, | |
| "grad_norm": 1.6856465858879643, | |
| "learning_rate": 7.4576271186440685e-06, | |
| "loss": 0.1803, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.23711340206185566, | |
| "grad_norm": 1.8626644402225192, | |
| "learning_rate": 7.627118644067797e-06, | |
| "loss": 0.2046, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.2422680412371134, | |
| "grad_norm": 1.7027465927641543, | |
| "learning_rate": 7.796610169491526e-06, | |
| "loss": 0.124, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.24742268041237114, | |
| "grad_norm": 1.9041870166779817, | |
| "learning_rate": 7.966101694915255e-06, | |
| "loss": 0.1982, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.25257731958762886, | |
| "grad_norm": 1.770069442877448, | |
| "learning_rate": 8.135593220338983e-06, | |
| "loss": 0.2813, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.25773195876288657, | |
| "grad_norm": 1.953365961357983, | |
| "learning_rate": 8.305084745762712e-06, | |
| "loss": 0.2251, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.26288659793814434, | |
| "grad_norm": 2.002309230380239, | |
| "learning_rate": 8.47457627118644e-06, | |
| "loss": 0.2785, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.26804123711340205, | |
| "grad_norm": 1.5522236927921595, | |
| "learning_rate": 8.64406779661017e-06, | |
| "loss": 0.2388, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.27319587628865977, | |
| "grad_norm": 1.4065957481275269, | |
| "learning_rate": 8.8135593220339e-06, | |
| "loss": 0.1592, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.27835051546391754, | |
| "grad_norm": 1.344843484282986, | |
| "learning_rate": 8.983050847457628e-06, | |
| "loss": 0.184, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.28350515463917525, | |
| "grad_norm": 1.8809824575076326, | |
| "learning_rate": 9.152542372881356e-06, | |
| "loss": 0.1559, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.28865979381443296, | |
| "grad_norm": 1.554740329023379, | |
| "learning_rate": 9.322033898305085e-06, | |
| "loss": 0.1945, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.29381443298969073, | |
| "grad_norm": 1.7706197688983198, | |
| "learning_rate": 9.491525423728815e-06, | |
| "loss": 0.2799, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.29896907216494845, | |
| "grad_norm": 1.339143828253002, | |
| "learning_rate": 9.661016949152544e-06, | |
| "loss": 0.1094, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.30412371134020616, | |
| "grad_norm": 2.1220725285103086, | |
| "learning_rate": 9.830508474576272e-06, | |
| "loss": 0.3211, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.30927835051546393, | |
| "grad_norm": 1.3971936674913341, | |
| "learning_rate": 1e-05, | |
| "loss": 0.2081, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.31443298969072164, | |
| "grad_norm": 1.8165010693502677, | |
| "learning_rate": 9.999909794073715e-06, | |
| "loss": 0.191, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.31958762886597936, | |
| "grad_norm": 1.8206712917879821, | |
| "learning_rate": 9.999639179549699e-06, | |
| "loss": 0.1817, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.3247422680412371, | |
| "grad_norm": 2.1904370888969704, | |
| "learning_rate": 9.999188166192368e-06, | |
| "loss": 0.1933, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.32989690721649484, | |
| "grad_norm": 1.356123942482486, | |
| "learning_rate": 9.998556770275351e-06, | |
| "loss": 0.1529, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.33505154639175255, | |
| "grad_norm": 1.4167068373050817, | |
| "learning_rate": 9.997745014580912e-06, | |
| "loss": 0.2182, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.3402061855670103, | |
| "grad_norm": 1.2778114297184384, | |
| "learning_rate": 9.996752928399121e-06, | |
| "loss": 0.1231, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.34536082474226804, | |
| "grad_norm": 1.4693549048926278, | |
| "learning_rate": 9.995580547526798e-06, | |
| "loss": 0.2222, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.35051546391752575, | |
| "grad_norm": 1.4349642188073422, | |
| "learning_rate": 9.994227914266222e-06, | |
| "loss": 0.1934, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.3556701030927835, | |
| "grad_norm": 1.3639498444936125, | |
| "learning_rate": 9.992695077423609e-06, | |
| "loss": 0.1707, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.36082474226804123, | |
| "grad_norm": 1.3943030751682663, | |
| "learning_rate": 9.990982092307347e-06, | |
| "loss": 0.1716, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.36597938144329895, | |
| "grad_norm": 1.4616151628165952, | |
| "learning_rate": 9.989089020725999e-06, | |
| "loss": 0.2363, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.3711340206185567, | |
| "grad_norm": 1.7297954033170302, | |
| "learning_rate": 9.987015930986074e-06, | |
| "loss": 0.0612, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.37628865979381443, | |
| "grad_norm": 1.4130551566794556, | |
| "learning_rate": 9.984762897889568e-06, | |
| "loss": 0.2438, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.38144329896907214, | |
| "grad_norm": 1.4316589582684316, | |
| "learning_rate": 9.98233000273125e-06, | |
| "loss": 0.2529, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.3865979381443299, | |
| "grad_norm": 1.4921368045815033, | |
| "learning_rate": 9.97971733329575e-06, | |
| "loss": 0.2033, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.3917525773195876, | |
| "grad_norm": 1.6884913833837993, | |
| "learning_rate": 9.97692498385437e-06, | |
| "loss": 0.3251, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.39690721649484534, | |
| "grad_norm": 1.2618275142031115, | |
| "learning_rate": 9.973953055161702e-06, | |
| "loss": 0.2173, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.4020618556701031, | |
| "grad_norm": 1.2058891275808017, | |
| "learning_rate": 9.970801654451974e-06, | |
| "loss": 0.1337, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.4072164948453608, | |
| "grad_norm": 1.3014010097389481, | |
| "learning_rate": 9.967470895435197e-06, | |
| "loss": 0.2217, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.41237113402061853, | |
| "grad_norm": 1.0907558669955588, | |
| "learning_rate": 9.963960898293049e-06, | |
| "loss": 0.1166, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.4175257731958763, | |
| "grad_norm": 1.3508925279475854, | |
| "learning_rate": 9.96027178967455e-06, | |
| "loss": 0.2185, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.422680412371134, | |
| "grad_norm": 1.8405638710013943, | |
| "learning_rate": 9.956403702691482e-06, | |
| "loss": 0.2981, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.42783505154639173, | |
| "grad_norm": 0.9981831988732881, | |
| "learning_rate": 9.952356776913594e-06, | |
| "loss": 0.1471, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.4329896907216495, | |
| "grad_norm": 1.5438578399853147, | |
| "learning_rate": 9.948131158363564e-06, | |
| "loss": 0.2219, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.4381443298969072, | |
| "grad_norm": 1.373967099765683, | |
| "learning_rate": 9.943726999511721e-06, | |
| "loss": 0.2625, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.44329896907216493, | |
| "grad_norm": 1.5045665327970632, | |
| "learning_rate": 9.939144459270557e-06, | |
| "loss": 0.2378, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.4484536082474227, | |
| "grad_norm": 1.2348801218707532, | |
| "learning_rate": 9.934383702988992e-06, | |
| "loss": 0.1299, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.4536082474226804, | |
| "grad_norm": 1.3457854200982862, | |
| "learning_rate": 9.929444902446392e-06, | |
| "loss": 0.2193, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.4587628865979381, | |
| "grad_norm": 1.5974941630503592, | |
| "learning_rate": 9.924328235846393e-06, | |
| "loss": 0.3447, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.4639175257731959, | |
| "grad_norm": 1.7921462524347982, | |
| "learning_rate": 9.919033887810451e-06, | |
| "loss": 0.3081, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.4690721649484536, | |
| "grad_norm": 1.0544333428105994, | |
| "learning_rate": 9.913562049371196e-06, | |
| "loss": 0.1723, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.4742268041237113, | |
| "grad_norm": 1.173180659355975, | |
| "learning_rate": 9.90791291796553e-06, | |
| "loss": 0.1816, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.4793814432989691, | |
| "grad_norm": 1.2343501847799747, | |
| "learning_rate": 9.902086697427504e-06, | |
| "loss": 0.2048, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.4845360824742268, | |
| "grad_norm": 1.4166315003510848, | |
| "learning_rate": 9.896083597980968e-06, | |
| "loss": 0.1611, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.4896907216494845, | |
| "grad_norm": 0.9316258247144923, | |
| "learning_rate": 9.88990383623198e-06, | |
| "loss": 0.1307, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.4948453608247423, | |
| "grad_norm": 1.1284196528102999, | |
| "learning_rate": 9.883547635160991e-06, | |
| "loss": 0.1816, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.9136263992805103, | |
| "learning_rate": 9.877015224114806e-06, | |
| "loss": 0.1173, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.5051546391752577, | |
| "grad_norm": 1.5233730471599805, | |
| "learning_rate": 9.870306838798299e-06, | |
| "loss": 0.2239, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.5103092783505154, | |
| "grad_norm": 1.4031394060036828, | |
| "learning_rate": 9.863422721265913e-06, | |
| "loss": 0.2398, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.5154639175257731, | |
| "grad_norm": 1.12765997685746, | |
| "learning_rate": 9.856363119912931e-06, | |
| "loss": 0.1318, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.520618556701031, | |
| "grad_norm": 0.8420145818838017, | |
| "learning_rate": 9.849128289466503e-06, | |
| "loss": 0.1246, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.5257731958762887, | |
| "grad_norm": 0.946095131905409, | |
| "learning_rate": 9.841718490976461e-06, | |
| "loss": 0.1241, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.5309278350515464, | |
| "grad_norm": 0.9796883673872017, | |
| "learning_rate": 9.8341339918059e-06, | |
| "loss": 0.0955, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.5360824742268041, | |
| "grad_norm": 1.1930460993410585, | |
| "learning_rate": 9.826375065621533e-06, | |
| "loss": 0.1646, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.5412371134020618, | |
| "grad_norm": 1.27651192729961, | |
| "learning_rate": 9.818441992383802e-06, | |
| "loss": 0.1566, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.5463917525773195, | |
| "grad_norm": 1.1315939745886057, | |
| "learning_rate": 9.810335058336801e-06, | |
| "loss": 0.1786, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.5515463917525774, | |
| "grad_norm": 0.9355015058700618, | |
| "learning_rate": 9.802054555997927e-06, | |
| "loss": 0.0945, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.5567010309278351, | |
| "grad_norm": 1.7639349345635655, | |
| "learning_rate": 9.79360078414733e-06, | |
| "loss": 0.254, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.5618556701030928, | |
| "grad_norm": 1.3663266258683646, | |
| "learning_rate": 9.784974047817142e-06, | |
| "loss": 0.1953, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.5670103092783505, | |
| "grad_norm": 1.441702754636908, | |
| "learning_rate": 9.776174658280458e-06, | |
| "loss": 0.1841, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.5721649484536082, | |
| "grad_norm": 1.1499799893613811, | |
| "learning_rate": 9.767202933040111e-06, | |
| "loss": 0.1997, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.5773195876288659, | |
| "grad_norm": 1.2076330399094353, | |
| "learning_rate": 9.758059195817216e-06, | |
| "loss": 0.1768, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.5824742268041238, | |
| "grad_norm": 0.878277090541755, | |
| "learning_rate": 9.748743776539489e-06, | |
| "loss": 0.1173, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.5876288659793815, | |
| "grad_norm": 1.0228670375234183, | |
| "learning_rate": 9.739257011329336e-06, | |
| "loss": 0.1361, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.5927835051546392, | |
| "grad_norm": 1.4761049929099612, | |
| "learning_rate": 9.729599242491738e-06, | |
| "loss": 0.2792, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.5979381443298969, | |
| "grad_norm": 1.0296746075367031, | |
| "learning_rate": 9.719770818501885e-06, | |
| "loss": 0.1, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.6030927835051546, | |
| "grad_norm": 0.9546648084132865, | |
| "learning_rate": 9.709772093992619e-06, | |
| "loss": 0.1072, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.6082474226804123, | |
| "grad_norm": 0.9359977344927911, | |
| "learning_rate": 9.699603429741615e-06, | |
| "loss": 0.1171, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.6134020618556701, | |
| "grad_norm": 1.2912642891989952, | |
| "learning_rate": 9.689265192658387e-06, | |
| "loss": 0.1783, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.6185567010309279, | |
| "grad_norm": 1.1169115856391039, | |
| "learning_rate": 9.67875775577104e-06, | |
| "loss": 0.2051, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.6237113402061856, | |
| "grad_norm": 1.4787464277701932, | |
| "learning_rate": 9.668081498212799e-06, | |
| "loss": 0.2377, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.6288659793814433, | |
| "grad_norm": 0.9403827546416947, | |
| "learning_rate": 9.657236805208347e-06, | |
| "loss": 0.1219, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.634020618556701, | |
| "grad_norm": 0.7358413148522787, | |
| "learning_rate": 9.646224068059917e-06, | |
| "loss": 0.0895, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.6391752577319587, | |
| "grad_norm": 1.1530465641868872, | |
| "learning_rate": 9.63504368413317e-06, | |
| "loss": 0.2149, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.6443298969072165, | |
| "grad_norm": 1.4217244985998034, | |
| "learning_rate": 9.62369605684286e-06, | |
| "loss": 0.197, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.6494845360824743, | |
| "grad_norm": 1.910749720256578, | |
| "learning_rate": 9.612181595638279e-06, | |
| "loss": 0.2508, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.654639175257732, | |
| "grad_norm": 1.2757659427730983, | |
| "learning_rate": 9.600500715988486e-06, | |
| "loss": 0.2809, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.6597938144329897, | |
| "grad_norm": 1.0891754027329807, | |
| "learning_rate": 9.588653839367304e-06, | |
| "loss": 0.165, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.6649484536082474, | |
| "grad_norm": 1.293429113847632, | |
| "learning_rate": 9.576641393238129e-06, | |
| "loss": 0.0855, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.6701030927835051, | |
| "grad_norm": 1.4599213966596118, | |
| "learning_rate": 9.564463811038489e-06, | |
| "loss": 0.2503, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.6752577319587629, | |
| "grad_norm": 1.3648021125385577, | |
| "learning_rate": 9.55212153216442e-06, | |
| "loss": 0.1794, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.6804123711340206, | |
| "grad_norm": 0.9809648930976671, | |
| "learning_rate": 9.5396150019546e-06, | |
| "loss": 0.0749, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.6855670103092784, | |
| "grad_norm": 1.2479101653995877, | |
| "learning_rate": 9.526944671674287e-06, | |
| "loss": 0.1705, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.6907216494845361, | |
| "grad_norm": 1.180767703263976, | |
| "learning_rate": 9.514110998499032e-06, | |
| "loss": 0.2419, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.6958762886597938, | |
| "grad_norm": 0.9973862175809532, | |
| "learning_rate": 9.501114445498183e-06, | |
| "loss": 0.1545, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.7010309278350515, | |
| "grad_norm": 0.9580969711134073, | |
| "learning_rate": 9.487955481618184e-06, | |
| "loss": 0.1563, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.7061855670103093, | |
| "grad_norm": 1.0576903096823542, | |
| "learning_rate": 9.474634581665645e-06, | |
| "loss": 0.128, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.711340206185567, | |
| "grad_norm": 1.1608177511918483, | |
| "learning_rate": 9.461152226290212e-06, | |
| "loss": 0.2324, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.7164948453608248, | |
| "grad_norm": 0.7247855062304921, | |
| "learning_rate": 9.44750890196723e-06, | |
| "loss": 0.0696, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.7216494845360825, | |
| "grad_norm": 1.294941334572682, | |
| "learning_rate": 9.43370510098018e-06, | |
| "loss": 0.2068, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.7268041237113402, | |
| "grad_norm": 1.5307061314891508, | |
| "learning_rate": 9.419741321402923e-06, | |
| "loss": 0.134, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.7319587628865979, | |
| "grad_norm": 1.724248389396898, | |
| "learning_rate": 9.405618067081729e-06, | |
| "loss": 0.3587, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.7371134020618557, | |
| "grad_norm": 0.7880104208217124, | |
| "learning_rate": 9.391335847617093e-06, | |
| "loss": 0.1081, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.7422680412371134, | |
| "grad_norm": 1.176218642717777, | |
| "learning_rate": 9.37689517834535e-06, | |
| "loss": 0.2552, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.7474226804123711, | |
| "grad_norm": 0.9007810641742303, | |
| "learning_rate": 9.362296580320078e-06, | |
| "loss": 0.1411, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.7525773195876289, | |
| "grad_norm": 1.0987174332049452, | |
| "learning_rate": 9.347540580293301e-06, | |
| "loss": 0.1984, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.7577319587628866, | |
| "grad_norm": 0.9593956667801066, | |
| "learning_rate": 9.332627710696477e-06, | |
| "loss": 0.1403, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.7628865979381443, | |
| "grad_norm": 1.5203788468340629, | |
| "learning_rate": 9.317558509621297e-06, | |
| "loss": 0.3035, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.7680412371134021, | |
| "grad_norm": 0.9687515693670804, | |
| "learning_rate": 9.302333520800253e-06, | |
| "loss": 0.1366, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.7731958762886598, | |
| "grad_norm": 0.7832678330612435, | |
| "learning_rate": 9.286953293587035e-06, | |
| "loss": 0.11, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.7783505154639175, | |
| "grad_norm": 0.9289801978987015, | |
| "learning_rate": 9.271418382936697e-06, | |
| "loss": 0.0988, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.7835051546391752, | |
| "grad_norm": 1.1356540696916577, | |
| "learning_rate": 9.255729349385645e-06, | |
| "loss": 0.1375, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.788659793814433, | |
| "grad_norm": 1.3495379294095782, | |
| "learning_rate": 9.239886759031399e-06, | |
| "loss": 0.3033, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.7938144329896907, | |
| "grad_norm": 1.2612818337900333, | |
| "learning_rate": 9.223891183512174e-06, | |
| "loss": 0.1364, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.7989690721649485, | |
| "grad_norm": 1.1325852394462053, | |
| "learning_rate": 9.207743199986252e-06, | |
| "loss": 0.1559, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.8041237113402062, | |
| "grad_norm": 0.6629329908683664, | |
| "learning_rate": 9.191443391111157e-06, | |
| "loss": 0.0678, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.8092783505154639, | |
| "grad_norm": 1.184280051526035, | |
| "learning_rate": 9.174992345022636e-06, | |
| "loss": 0.1762, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.8144329896907216, | |
| "grad_norm": 1.4363833078261825, | |
| "learning_rate": 9.158390655313422e-06, | |
| "loss": 0.2509, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.8195876288659794, | |
| "grad_norm": 1.4851296678460735, | |
| "learning_rate": 9.141638921011842e-06, | |
| "loss": 0.3053, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.8247422680412371, | |
| "grad_norm": 0.9939630731717258, | |
| "learning_rate": 9.124737746560175e-06, | |
| "loss": 0.0952, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.8298969072164949, | |
| "grad_norm": 1.2169101845107388, | |
| "learning_rate": 9.107687741792863e-06, | |
| "loss": 0.1494, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.8350515463917526, | |
| "grad_norm": 1.3295674637794006, | |
| "learning_rate": 9.090489521914492e-06, | |
| "loss": 0.2761, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.8402061855670103, | |
| "grad_norm": 1.2793707854001373, | |
| "learning_rate": 9.073143707477607e-06, | |
| "loss": 0.1433, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.845360824742268, | |
| "grad_norm": 1.2039862398086603, | |
| "learning_rate": 9.055650924360308e-06, | |
| "loss": 0.2154, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.8505154639175257, | |
| "grad_norm": 1.1250244597985437, | |
| "learning_rate": 9.038011803743679e-06, | |
| "loss": 0.1817, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.8556701030927835, | |
| "grad_norm": 1.1063531458560925, | |
| "learning_rate": 9.020226982089005e-06, | |
| "loss": 0.1564, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.8608247422680413, | |
| "grad_norm": 1.1306904530717008, | |
| "learning_rate": 9.002297101114813e-06, | |
| "loss": 0.1349, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.865979381443299, | |
| "grad_norm": 1.0467505323127504, | |
| "learning_rate": 8.984222807773707e-06, | |
| "loss": 0.1813, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.8711340206185567, | |
| "grad_norm": 1.715578103301857, | |
| "learning_rate": 8.966004754229037e-06, | |
| "loss": 0.3016, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.8762886597938144, | |
| "grad_norm": 0.9646361196653148, | |
| "learning_rate": 8.947643597831365e-06, | |
| "loss": 0.0727, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.8814432989690721, | |
| "grad_norm": 0.9218462690369367, | |
| "learning_rate": 8.929140001094734e-06, | |
| "loss": 0.1584, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.8865979381443299, | |
| "grad_norm": 0.9892292920559456, | |
| "learning_rate": 8.910494631672783e-06, | |
| "loss": 0.1449, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.8917525773195877, | |
| "grad_norm": 1.0579410641324796, | |
| "learning_rate": 8.891708162334635e-06, | |
| "loss": 0.156, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.8969072164948454, | |
| "grad_norm": 0.7647207044009656, | |
| "learning_rate": 8.87278127094064e-06, | |
| "loss": 0.1107, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.9020618556701031, | |
| "grad_norm": 1.1371579318359795, | |
| "learning_rate": 8.853714640417906e-06, | |
| "loss": 0.1844, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.9072164948453608, | |
| "grad_norm": 0.7724480629708496, | |
| "learning_rate": 8.834508958735656e-06, | |
| "loss": 0.0826, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.9123711340206185, | |
| "grad_norm": 1.0272763923631063, | |
| "learning_rate": 8.815164918880418e-06, | |
| "loss": 0.1678, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.9175257731958762, | |
| "grad_norm": 1.2865546631779095, | |
| "learning_rate": 8.795683218831002e-06, | |
| "loss": 0.2714, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.9226804123711341, | |
| "grad_norm": 1.8028031060416778, | |
| "learning_rate": 8.776064561533329e-06, | |
| "loss": 0.2045, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.9278350515463918, | |
| "grad_norm": 1.140464084681919, | |
| "learning_rate": 8.756309654875059e-06, | |
| "loss": 0.3075, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.9329896907216495, | |
| "grad_norm": 1.1867468428900187, | |
| "learning_rate": 8.736419211660054e-06, | |
| "loss": 0.2445, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.9381443298969072, | |
| "grad_norm": 1.2294904183688709, | |
| "learning_rate": 8.716393949582656e-06, | |
| "loss": 0.2416, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.9432989690721649, | |
| "grad_norm": 0.7483697175175423, | |
| "learning_rate": 8.696234591201793e-06, | |
| "loss": 0.1126, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.9484536082474226, | |
| "grad_norm": 0.9739357119214708, | |
| "learning_rate": 8.6759418639149e-06, | |
| "loss": 0.167, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.9536082474226805, | |
| "grad_norm": 1.4251293577602404, | |
| "learning_rate": 8.655516499931684e-06, | |
| "loss": 0.3314, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.9587628865979382, | |
| "grad_norm": 1.140569606339371, | |
| "learning_rate": 8.634959236247695e-06, | |
| "loss": 0.2206, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.9639175257731959, | |
| "grad_norm": 0.9214436194181765, | |
| "learning_rate": 8.61427081461774e-06, | |
| "loss": 0.1622, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.9690721649484536, | |
| "grad_norm": 1.4025795614332346, | |
| "learning_rate": 8.593451981529109e-06, | |
| "loss": 0.2202, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.9742268041237113, | |
| "grad_norm": 0.7327985656704634, | |
| "learning_rate": 8.572503488174655e-06, | |
| "loss": 0.0798, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.979381443298969, | |
| "grad_norm": 1.1641896784338794, | |
| "learning_rate": 8.551426090425678e-06, | |
| "loss": 0.1978, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.9845360824742269, | |
| "grad_norm": 1.086213303235639, | |
| "learning_rate": 8.53022054880465e-06, | |
| "loss": 0.1558, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.9896907216494846, | |
| "grad_norm": 1.0472347005656282, | |
| "learning_rate": 8.508887628457783e-06, | |
| "loss": 0.2053, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.9948453608247423, | |
| "grad_norm": 0.7910662337183756, | |
| "learning_rate": 8.487428099127411e-06, | |
| "loss": 0.0927, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.1006430197444728, | |
| "learning_rate": 8.465842735124224e-06, | |
| "loss": 0.1894, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 1.0051546391752577, | |
| "grad_norm": 0.5906742201713365, | |
| "learning_rate": 8.444132315299321e-06, | |
| "loss": 0.0537, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 1.0103092783505154, | |
| "grad_norm": 0.6751125368476019, | |
| "learning_rate": 8.422297623016118e-06, | |
| "loss": 0.0806, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 1.0154639175257731, | |
| "grad_norm": 1.0260689629426207, | |
| "learning_rate": 8.40033944612207e-06, | |
| "loss": 0.1132, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 1.0206185567010309, | |
| "grad_norm": 0.5551436381218386, | |
| "learning_rate": 8.378258576920253e-06, | |
| "loss": 0.0523, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 1.0257731958762886, | |
| "grad_norm": 0.6869421294596499, | |
| "learning_rate": 8.356055812140768e-06, | |
| "loss": 0.0907, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 1.0309278350515463, | |
| "grad_norm": 0.9886419404257938, | |
| "learning_rate": 8.333731952912e-06, | |
| "loss": 0.1147, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 1.0360824742268042, | |
| "grad_norm": 0.8201901345855567, | |
| "learning_rate": 8.311287804731716e-06, | |
| "loss": 0.053, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 1.041237113402062, | |
| "grad_norm": 0.6231211273936398, | |
| "learning_rate": 8.288724177437976e-06, | |
| "loss": 0.0558, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 1.0463917525773196, | |
| "grad_norm": 1.0979347326591262, | |
| "learning_rate": 8.266041885179949e-06, | |
| "loss": 0.1248, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 1.0515463917525774, | |
| "grad_norm": 1.0441782376710624, | |
| "learning_rate": 8.243241746388504e-06, | |
| "loss": 0.1024, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 1.056701030927835, | |
| "grad_norm": 0.8617708313835702, | |
| "learning_rate": 8.220324583746697e-06, | |
| "loss": 0.0864, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 1.0618556701030928, | |
| "grad_norm": 0.8219004811721925, | |
| "learning_rate": 8.197291224160082e-06, | |
| "loss": 0.0965, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 1.0670103092783505, | |
| "grad_norm": 1.446199673821668, | |
| "learning_rate": 8.174142498726875e-06, | |
| "loss": 0.1316, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 1.0721649484536082, | |
| "grad_norm": 0.9970004312056989, | |
| "learning_rate": 8.150879242707963e-06, | |
| "loss": 0.0785, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 1.077319587628866, | |
| "grad_norm": 0.6799947961502295, | |
| "learning_rate": 8.127502295496768e-06, | |
| "loss": 0.0546, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 1.0824742268041236, | |
| "grad_norm": 0.8459778750809145, | |
| "learning_rate": 8.104012500588962e-06, | |
| "loss": 0.0754, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 1.0876288659793814, | |
| "grad_norm": 0.8764373407801908, | |
| "learning_rate": 8.080410705552028e-06, | |
| "loss": 0.0725, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 1.0927835051546393, | |
| "grad_norm": 1.4250917004738353, | |
| "learning_rate": 8.056697761994679e-06, | |
| "loss": 0.1466, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 1.097938144329897, | |
| "grad_norm": 0.7470601320144464, | |
| "learning_rate": 8.032874525536132e-06, | |
| "loss": 0.0554, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 1.1030927835051547, | |
| "grad_norm": 0.7217791614950412, | |
| "learning_rate": 8.008941855775228e-06, | |
| "loss": 0.104, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 1.1082474226804124, | |
| "grad_norm": 1.018751301091875, | |
| "learning_rate": 7.98490061625943e-06, | |
| "loss": 0.1098, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 1.1134020618556701, | |
| "grad_norm": 0.8577713345259472, | |
| "learning_rate": 7.960751674453644e-06, | |
| "loss": 0.106, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 1.1185567010309279, | |
| "grad_norm": 0.7354671182870788, | |
| "learning_rate": 7.93649590170894e-06, | |
| "loss": 0.0776, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 1.1237113402061856, | |
| "grad_norm": 0.784080342118224, | |
| "learning_rate": 7.912134173231099e-06, | |
| "loss": 0.1065, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 1.1288659793814433, | |
| "grad_norm": 1.0447979386760058, | |
| "learning_rate": 7.887667368049028e-06, | |
| "loss": 0.1501, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 1.134020618556701, | |
| "grad_norm": 0.8251703086624239, | |
| "learning_rate": 7.863096368983061e-06, | |
| "loss": 0.0888, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.1391752577319587, | |
| "grad_norm": 0.8822362088146911, | |
| "learning_rate": 7.838422062613088e-06, | |
| "loss": 0.093, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 1.1443298969072164, | |
| "grad_norm": 0.8949834524497724, | |
| "learning_rate": 7.813645339246578e-06, | |
| "loss": 0.0921, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 1.1494845360824741, | |
| "grad_norm": 0.5374131970068409, | |
| "learning_rate": 7.78876709288644e-06, | |
| "loss": 0.0386, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 1.1546391752577319, | |
| "grad_norm": 0.7564742772338368, | |
| "learning_rate": 7.763788221198775e-06, | |
| "loss": 0.0948, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 1.1597938144329896, | |
| "grad_norm": 0.8808313875725176, | |
| "learning_rate": 7.738709625480494e-06, | |
| "loss": 0.092, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 1.1649484536082475, | |
| "grad_norm": 0.6478248778597727, | |
| "learning_rate": 7.713532210626771e-06, | |
| "loss": 0.0458, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 1.1701030927835052, | |
| "grad_norm": 0.9288097880385096, | |
| "learning_rate": 7.68825688509842e-06, | |
| "loss": 0.0894, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 1.175257731958763, | |
| "grad_norm": 0.714867355071131, | |
| "learning_rate": 7.662884560889106e-06, | |
| "loss": 0.0541, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 1.1804123711340206, | |
| "grad_norm": 0.6642395918740392, | |
| "learning_rate": 7.637416153492426e-06, | |
| "loss": 0.0663, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 1.1855670103092784, | |
| "grad_norm": 0.9527549900666177, | |
| "learning_rate": 7.611852581868895e-06, | |
| "loss": 0.1443, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 1.190721649484536, | |
| "grad_norm": 0.7893883831371513, | |
| "learning_rate": 7.586194768412778e-06, | |
| "loss": 0.0872, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 1.1958762886597938, | |
| "grad_norm": 0.9884146330428806, | |
| "learning_rate": 7.560443638918801e-06, | |
| "loss": 0.0821, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 1.2010309278350515, | |
| "grad_norm": 0.7413524893904028, | |
| "learning_rate": 7.534600122548766e-06, | |
| "loss": 0.1001, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 1.2061855670103092, | |
| "grad_norm": 0.6487178792290845, | |
| "learning_rate": 7.508665151798e-06, | |
| "loss": 0.0669, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 1.211340206185567, | |
| "grad_norm": 1.2033223139705194, | |
| "learning_rate": 7.482639662461731e-06, | |
| "loss": 0.1102, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 1.2164948453608249, | |
| "grad_norm": 1.1496624331229564, | |
| "learning_rate": 7.456524593601306e-06, | |
| "loss": 0.0817, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 1.2216494845360826, | |
| "grad_norm": 0.8048271397849805, | |
| "learning_rate": 7.430320887510319e-06, | |
| "loss": 0.066, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 1.2268041237113403, | |
| "grad_norm": 1.0865257986253214, | |
| "learning_rate": 7.404029489680597e-06, | |
| "loss": 0.1054, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 1.231958762886598, | |
| "grad_norm": 0.9118222773920444, | |
| "learning_rate": 7.377651348768102e-06, | |
| "loss": 0.0734, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 1.2371134020618557, | |
| "grad_norm": 0.7111948595434698, | |
| "learning_rate": 7.351187416558686e-06, | |
| "loss": 0.0455, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 1.2422680412371134, | |
| "grad_norm": 1.1586255328358697, | |
| "learning_rate": 7.324638647933756e-06, | |
| "loss": 0.1247, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 1.2474226804123711, | |
| "grad_norm": 0.7885336165369894, | |
| "learning_rate": 7.29800600083582e-06, | |
| "loss": 0.0944, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.2525773195876289, | |
| "grad_norm": 0.815328204346094, | |
| "learning_rate": 7.2712904362339155e-06, | |
| "loss": 0.0846, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 1.2577319587628866, | |
| "grad_norm": 1.044831188842817, | |
| "learning_rate": 7.244492918088946e-06, | |
| "loss": 0.1029, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 1.2628865979381443, | |
| "grad_norm": 1.1074505493277451, | |
| "learning_rate": 7.217614413318887e-06, | |
| "loss": 0.1109, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 1.268041237113402, | |
| "grad_norm": 0.836136628649968, | |
| "learning_rate": 7.19065589176391e-06, | |
| "loss": 0.1074, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 1.2731958762886597, | |
| "grad_norm": 0.9806183729514004, | |
| "learning_rate": 7.1636183261513784e-06, | |
| "loss": 0.1046, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 1.2783505154639174, | |
| "grad_norm": 0.8666879067703104, | |
| "learning_rate": 7.136502692060746e-06, | |
| "loss": 0.0826, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 1.2835051546391751, | |
| "grad_norm": 0.801232774030085, | |
| "learning_rate": 7.109309967888376e-06, | |
| "loss": 0.0556, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 1.2886597938144329, | |
| "grad_norm": 1.1897623094936245, | |
| "learning_rate": 7.0820411348122144e-06, | |
| "loss": 0.154, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.2938144329896908, | |
| "grad_norm": 0.8276583732971441, | |
| "learning_rate": 7.0546971767564e-06, | |
| "loss": 0.0833, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 1.2989690721649485, | |
| "grad_norm": 0.7583663543944267, | |
| "learning_rate": 7.027279080355756e-06, | |
| "loss": 0.0499, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 1.3041237113402062, | |
| "grad_norm": 0.8649391169592322, | |
| "learning_rate": 6.999787834920202e-06, | |
| "loss": 0.0867, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.309278350515464, | |
| "grad_norm": 1.0915318471452742, | |
| "learning_rate": 6.972224432399038e-06, | |
| "loss": 0.0676, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 1.3144329896907216, | |
| "grad_norm": 1.0231154625513728, | |
| "learning_rate": 6.9445898673451635e-06, | |
| "loss": 0.1028, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 1.3195876288659794, | |
| "grad_norm": 0.933663875875191, | |
| "learning_rate": 6.916885136879197e-06, | |
| "loss": 0.0967, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 1.324742268041237, | |
| "grad_norm": 0.8912988511114369, | |
| "learning_rate": 6.889111240653488e-06, | |
| "loss": 0.0959, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 1.3298969072164948, | |
| "grad_norm": 0.8687955070867821, | |
| "learning_rate": 6.861269180816052e-06, | |
| "loss": 0.0977, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 1.3350515463917525, | |
| "grad_norm": 0.7106326271334324, | |
| "learning_rate": 6.833359961974406e-06, | |
| "loss": 0.0869, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 1.3402061855670104, | |
| "grad_norm": 0.9501736705275039, | |
| "learning_rate": 6.805384591159325e-06, | |
| "loss": 0.0949, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 1.3453608247422681, | |
| "grad_norm": 0.804663253737915, | |
| "learning_rate": 6.7773440777885055e-06, | |
| "loss": 0.0811, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 1.3505154639175259, | |
| "grad_norm": 0.6042006243936727, | |
| "learning_rate": 6.749239433630137e-06, | |
| "loss": 0.0537, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 1.3556701030927836, | |
| "grad_norm": 1.1315557610677505, | |
| "learning_rate": 6.721071672766407e-06, | |
| "loss": 0.1242, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 1.3608247422680413, | |
| "grad_norm": 0.664551606971833, | |
| "learning_rate": 6.6928418115568994e-06, | |
| "loss": 0.05, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.365979381443299, | |
| "grad_norm": 0.6628232247101428, | |
| "learning_rate": 6.6645508686019225e-06, | |
| "loss": 0.0709, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 1.3711340206185567, | |
| "grad_norm": 1.3704143398811302, | |
| "learning_rate": 6.636199864705766e-06, | |
| "loss": 0.1788, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 1.3762886597938144, | |
| "grad_norm": 0.653208777448595, | |
| "learning_rate": 6.607789822839855e-06, | |
| "loss": 0.0605, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 1.3814432989690721, | |
| "grad_norm": 0.7238199007998108, | |
| "learning_rate": 6.579321768105845e-06, | |
| "loss": 0.081, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 1.3865979381443299, | |
| "grad_norm": 0.7205533607672164, | |
| "learning_rate": 6.550796727698639e-06, | |
| "loss": 0.067, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 1.3917525773195876, | |
| "grad_norm": 0.6700885764195117, | |
| "learning_rate": 6.52221573086931e-06, | |
| "loss": 0.0503, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 1.3969072164948453, | |
| "grad_norm": 1.219456301028477, | |
| "learning_rate": 6.493579808887976e-06, | |
| "loss": 0.1329, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 1.402061855670103, | |
| "grad_norm": 1.327064144446766, | |
| "learning_rate": 6.4648899950065865e-06, | |
| "loss": 0.1581, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 1.4072164948453607, | |
| "grad_norm": 0.8319301363228734, | |
| "learning_rate": 6.436147324421635e-06, | |
| "loss": 0.0867, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 1.4123711340206184, | |
| "grad_norm": 1.1371748524536707, | |
| "learning_rate": 6.407352834236807e-06, | |
| "loss": 0.1496, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 1.4175257731958764, | |
| "grad_norm": 0.856128052640479, | |
| "learning_rate": 6.378507563425571e-06, | |
| "loss": 0.0804, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.422680412371134, | |
| "grad_norm": 1.062404018021394, | |
| "learning_rate": 6.349612552793675e-06, | |
| "loss": 0.1043, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 1.4278350515463918, | |
| "grad_norm": 0.9101560194006858, | |
| "learning_rate": 6.320668844941598e-06, | |
| "loss": 0.1002, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 1.4329896907216495, | |
| "grad_norm": 1.2757033041145538, | |
| "learning_rate": 6.291677484226929e-06, | |
| "loss": 0.1098, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 1.4381443298969072, | |
| "grad_norm": 1.2846837622920635, | |
| "learning_rate": 6.26263951672669e-06, | |
| "loss": 0.1197, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 1.443298969072165, | |
| "grad_norm": 0.6126591325904064, | |
| "learning_rate": 6.233555990199583e-06, | |
| "loss": 0.0526, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 1.4484536082474226, | |
| "grad_norm": 0.9201467393931315, | |
| "learning_rate": 6.204427954048186e-06, | |
| "loss": 0.095, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 1.4536082474226804, | |
| "grad_norm": 0.8709260663548162, | |
| "learning_rate": 6.175256459281093e-06, | |
| "loss": 0.0956, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 1.458762886597938, | |
| "grad_norm": 0.8394883872798908, | |
| "learning_rate": 6.146042558474987e-06, | |
| "loss": 0.0782, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 1.463917525773196, | |
| "grad_norm": 0.7647943393509891, | |
| "learning_rate": 6.116787305736659e-06, | |
| "loss": 0.0964, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 1.4690721649484537, | |
| "grad_norm": 1.1981279245762775, | |
| "learning_rate": 6.087491756664982e-06, | |
| "loss": 0.1251, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 1.4742268041237114, | |
| "grad_norm": 0.7281773116566167, | |
| "learning_rate": 6.058156968312808e-06, | |
| "loss": 0.0636, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.4793814432989691, | |
| "grad_norm": 0.9415282186895138, | |
| "learning_rate": 6.028783999148841e-06, | |
| "loss": 0.121, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 1.4845360824742269, | |
| "grad_norm": 0.8155749049438948, | |
| "learning_rate": 5.999373909019437e-06, | |
| "loss": 0.0607, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 1.4896907216494846, | |
| "grad_norm": 0.7639944315517929, | |
| "learning_rate": 5.9699277591103665e-06, | |
| "loss": 0.0548, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 1.4948453608247423, | |
| "grad_norm": 0.8636520383698243, | |
| "learning_rate": 5.940446611908519e-06, | |
| "loss": 0.0979, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.5326124314349575, | |
| "learning_rate": 5.91093153116357e-06, | |
| "loss": 0.0373, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 1.5051546391752577, | |
| "grad_norm": 0.8280155720382041, | |
| "learning_rate": 5.881383581849601e-06, | |
| "loss": 0.0698, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 1.5103092783505154, | |
| "grad_norm": 0.9092824338400287, | |
| "learning_rate": 5.851803830126666e-06, | |
| "loss": 0.1016, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 1.5154639175257731, | |
| "grad_norm": 1.2564418341193255, | |
| "learning_rate": 5.822193343302328e-06, | |
| "loss": 0.1473, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 1.5206185567010309, | |
| "grad_norm": 0.8983314520281567, | |
| "learning_rate": 5.792553189793141e-06, | |
| "loss": 0.1071, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 1.5257731958762886, | |
| "grad_norm": 0.9871176927471953, | |
| "learning_rate": 5.762884439086108e-06, | |
| "loss": 0.1235, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 1.5309278350515463, | |
| "grad_norm": 1.0212569217738547, | |
| "learning_rate": 5.733188161700084e-06, | |
| "loss": 0.1144, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.536082474226804, | |
| "grad_norm": 1.0505577277639713, | |
| "learning_rate": 5.703465429147153e-06, | |
| "loss": 0.0856, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 1.5412371134020617, | |
| "grad_norm": 0.9198501514769984, | |
| "learning_rate": 5.673717313893963e-06, | |
| "loss": 0.0857, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 1.5463917525773194, | |
| "grad_norm": 0.4413128663709364, | |
| "learning_rate": 5.643944889323031e-06, | |
| "loss": 0.0339, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.5515463917525774, | |
| "grad_norm": 0.9176541299396651, | |
| "learning_rate": 5.6141492296940104e-06, | |
| "loss": 0.0883, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 1.556701030927835, | |
| "grad_norm": 0.4947875542348694, | |
| "learning_rate": 5.584331410104934e-06, | |
| "loss": 0.0282, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 1.5618556701030928, | |
| "grad_norm": 1.1769881441147931, | |
| "learning_rate": 5.554492506453415e-06, | |
| "loss": 0.1418, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 1.5670103092783505, | |
| "grad_norm": 0.8273504464851464, | |
| "learning_rate": 5.524633595397829e-06, | |
| "loss": 0.0639, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 1.5721649484536082, | |
| "grad_norm": 1.104084036749989, | |
| "learning_rate": 5.494755754318472e-06, | |
| "loss": 0.1496, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 1.577319587628866, | |
| "grad_norm": 0.722149475333966, | |
| "learning_rate": 5.464860061278673e-06, | |
| "loss": 0.06, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 1.5824742268041239, | |
| "grad_norm": 0.765678640808434, | |
| "learning_rate": 5.434947594985903e-06, | |
| "loss": 0.064, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 1.5876288659793816, | |
| "grad_norm": 0.8133799235994497, | |
| "learning_rate": 5.40501943475286e-06, | |
| "loss": 0.0834, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.5927835051546393, | |
| "grad_norm": 1.0133768144130846, | |
| "learning_rate": 5.375076660458503e-06, | |
| "loss": 0.1211, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 1.597938144329897, | |
| "grad_norm": 0.8911686618887108, | |
| "learning_rate": 5.345120352509114e-06, | |
| "loss": 0.1135, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.6030927835051547, | |
| "grad_norm": 0.9837172309407369, | |
| "learning_rate": 5.315151591799293e-06, | |
| "loss": 0.0939, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 1.6082474226804124, | |
| "grad_norm": 0.9603201029901375, | |
| "learning_rate": 5.28517145967297e-06, | |
| "loss": 0.1191, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 1.6134020618556701, | |
| "grad_norm": 0.7568038226377672, | |
| "learning_rate": 5.255181037884377e-06, | |
| "loss": 0.0878, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 1.6185567010309279, | |
| "grad_norm": 0.7396794148405889, | |
| "learning_rate": 5.225181408559028e-06, | |
| "loss": 0.0713, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 1.6237113402061856, | |
| "grad_norm": 0.8098683954087241, | |
| "learning_rate": 5.195173654154662e-06, | |
| "loss": 0.06, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 1.6288659793814433, | |
| "grad_norm": 0.9687294980139846, | |
| "learning_rate": 5.165158857422191e-06, | |
| "loss": 0.0856, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 1.634020618556701, | |
| "grad_norm": 0.991898312261934, | |
| "learning_rate": 5.135138101366633e-06, | |
| "loss": 0.1205, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 1.6391752577319587, | |
| "grad_norm": 0.5573665413209947, | |
| "learning_rate": 5.105112469208032e-06, | |
| "loss": 0.0497, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 1.6443298969072164, | |
| "grad_norm": 1.0382601563430964, | |
| "learning_rate": 5.075083044342371e-06, | |
| "loss": 0.0852, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.6494845360824741, | |
| "grad_norm": 0.9967396087063322, | |
| "learning_rate": 5.045050910302485e-06, | |
| "loss": 0.0877, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.6546391752577319, | |
| "grad_norm": 0.7962058373120292, | |
| "learning_rate": 5.015017150718961e-06, | |
| "loss": 0.0837, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 1.6597938144329896, | |
| "grad_norm": 0.6489956586111506, | |
| "learning_rate": 4.984982849281041e-06, | |
| "loss": 0.0394, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 1.6649484536082473, | |
| "grad_norm": 0.6615424807101872, | |
| "learning_rate": 4.9549490896975175e-06, | |
| "loss": 0.0507, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 1.670103092783505, | |
| "grad_norm": 1.0065188138819108, | |
| "learning_rate": 4.92491695565763e-06, | |
| "loss": 0.0913, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 1.675257731958763, | |
| "grad_norm": 0.9338664154455895, | |
| "learning_rate": 4.894887530791968e-06, | |
| "loss": 0.0829, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 1.6804123711340206, | |
| "grad_norm": 0.8619735255759078, | |
| "learning_rate": 4.864861898633367e-06, | |
| "loss": 0.1015, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 1.6855670103092784, | |
| "grad_norm": 1.1629690007707925, | |
| "learning_rate": 4.8348411425778105e-06, | |
| "loss": 0.1093, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 1.690721649484536, | |
| "grad_norm": 0.8508681932219984, | |
| "learning_rate": 4.80482634584534e-06, | |
| "loss": 0.0787, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 1.6958762886597938, | |
| "grad_norm": 0.7692449841056024, | |
| "learning_rate": 4.774818591440974e-06, | |
| "loss": 0.0958, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 1.7010309278350515, | |
| "grad_norm": 0.8553151177506546, | |
| "learning_rate": 4.744818962115624e-06, | |
| "loss": 0.082, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.7061855670103094, | |
| "grad_norm": 0.5903484968058315, | |
| "learning_rate": 4.714828540327033e-06, | |
| "loss": 0.0462, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 1.7113402061855671, | |
| "grad_norm": 0.8864451372917644, | |
| "learning_rate": 4.684848408200707e-06, | |
| "loss": 0.0714, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 1.7164948453608249, | |
| "grad_norm": 0.8860344350682171, | |
| "learning_rate": 4.654879647490887e-06, | |
| "loss": 0.0691, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.7216494845360826, | |
| "grad_norm": 0.8183534311660873, | |
| "learning_rate": 4.624923339541498e-06, | |
| "loss": 0.0633, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.7268041237113403, | |
| "grad_norm": 0.614568204141328, | |
| "learning_rate": 4.594980565247143e-06, | |
| "loss": 0.0495, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.731958762886598, | |
| "grad_norm": 0.9930144229307591, | |
| "learning_rate": 4.565052405014098e-06, | |
| "loss": 0.1059, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.7371134020618557, | |
| "grad_norm": 0.7592312136391701, | |
| "learning_rate": 4.5351399387213305e-06, | |
| "loss": 0.0618, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.7422680412371134, | |
| "grad_norm": 1.4380591077584624, | |
| "learning_rate": 4.5052442456815294e-06, | |
| "loss": 0.0997, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.7474226804123711, | |
| "grad_norm": 0.9459809494967879, | |
| "learning_rate": 4.47536640460217e-06, | |
| "loss": 0.1004, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.7525773195876289, | |
| "grad_norm": 0.7417698139276238, | |
| "learning_rate": 4.445507493546586e-06, | |
| "loss": 0.0805, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.7577319587628866, | |
| "grad_norm": 0.8768339149771931, | |
| "learning_rate": 4.4156685898950676e-06, | |
| "loss": 0.0892, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.7628865979381443, | |
| "grad_norm": 0.5753560085161841, | |
| "learning_rate": 4.385850770305991e-06, | |
| "loss": 0.0546, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.768041237113402, | |
| "grad_norm": 0.6203625417079575, | |
| "learning_rate": 4.356055110676971e-06, | |
| "loss": 0.0557, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.7731958762886597, | |
| "grad_norm": 1.290592433705904, | |
| "learning_rate": 4.326282686106039e-06, | |
| "loss": 0.0924, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.7783505154639174, | |
| "grad_norm": 0.8501802343729535, | |
| "learning_rate": 4.296534570852848e-06, | |
| "loss": 0.0976, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.7835051546391751, | |
| "grad_norm": 0.7589958862583852, | |
| "learning_rate": 4.266811838299916e-06, | |
| "loss": 0.0616, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.7886597938144329, | |
| "grad_norm": 0.6634970203515521, | |
| "learning_rate": 4.237115560913894e-06, | |
| "loss": 0.0547, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.7938144329896906, | |
| "grad_norm": 0.7377540659991918, | |
| "learning_rate": 4.20744681020686e-06, | |
| "loss": 0.0562, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.7989690721649485, | |
| "grad_norm": 1.0154222900360976, | |
| "learning_rate": 4.1778066566976736e-06, | |
| "loss": 0.0889, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.8041237113402062, | |
| "grad_norm": 1.1675640094076947, | |
| "learning_rate": 4.148196169873335e-06, | |
| "loss": 0.1241, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.809278350515464, | |
| "grad_norm": 0.5783006867375596, | |
| "learning_rate": 4.118616418150398e-06, | |
| "loss": 0.0371, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.8144329896907216, | |
| "grad_norm": 1.067759818874075, | |
| "learning_rate": 4.089068468836431e-06, | |
| "loss": 0.1045, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.8195876288659794, | |
| "grad_norm": 0.8301529919273785, | |
| "learning_rate": 4.059553388091483e-06, | |
| "loss": 0.0907, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.824742268041237, | |
| "grad_norm": 0.8633283627168938, | |
| "learning_rate": 4.030072240889635e-06, | |
| "loss": 0.0883, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.829896907216495, | |
| "grad_norm": 1.0045407842411636, | |
| "learning_rate": 4.000626090980564e-06, | |
| "loss": 0.109, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.8350515463917527, | |
| "grad_norm": 0.6176720353760472, | |
| "learning_rate": 3.971216000851161e-06, | |
| "loss": 0.046, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.8402061855670104, | |
| "grad_norm": 0.6741212952732083, | |
| "learning_rate": 3.941843031687194e-06, | |
| "loss": 0.0616, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.8453608247422681, | |
| "grad_norm": 0.786429607982951, | |
| "learning_rate": 3.912508243335019e-06, | |
| "loss": 0.0814, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.8505154639175259, | |
| "grad_norm": 0.7681340177017846, | |
| "learning_rate": 3.883212694263341e-06, | |
| "loss": 0.0413, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.8556701030927836, | |
| "grad_norm": 0.9188584907010531, | |
| "learning_rate": 3.853957441525014e-06, | |
| "loss": 0.0917, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.8608247422680413, | |
| "grad_norm": 0.7339463471541188, | |
| "learning_rate": 3.824743540718909e-06, | |
| "loss": 0.0518, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.865979381443299, | |
| "grad_norm": 0.7505002518495965, | |
| "learning_rate": 3.7955720459518163e-06, | |
| "loss": 0.0611, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.8711340206185567, | |
| "grad_norm": 0.6977481350680906, | |
| "learning_rate": 3.7664440098004194e-06, | |
| "loss": 0.0662, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.8762886597938144, | |
| "grad_norm": 0.9305995047119137, | |
| "learning_rate": 3.7373604832733103e-06, | |
| "loss": 0.1134, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.8814432989690721, | |
| "grad_norm": 0.838945380538262, | |
| "learning_rate": 3.708322515773071e-06, | |
| "loss": 0.0848, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.8865979381443299, | |
| "grad_norm": 0.950916164653827, | |
| "learning_rate": 3.6793311550584043e-06, | |
| "loss": 0.0848, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.8917525773195876, | |
| "grad_norm": 0.8218742627036616, | |
| "learning_rate": 3.6503874472063268e-06, | |
| "loss": 0.1023, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.8969072164948453, | |
| "grad_norm": 0.9078783194850608, | |
| "learning_rate": 3.62149243657443e-06, | |
| "loss": 0.0931, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.902061855670103, | |
| "grad_norm": 0.688660913780332, | |
| "learning_rate": 3.5926471657631945e-06, | |
| "loss": 0.0589, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.9072164948453607, | |
| "grad_norm": 0.6952675588410017, | |
| "learning_rate": 3.563852675578368e-06, | |
| "loss": 0.0502, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.9123711340206184, | |
| "grad_norm": 1.3884427251376699, | |
| "learning_rate": 3.535110004993414e-06, | |
| "loss": 0.1733, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.9175257731958761, | |
| "grad_norm": 1.0258997166309576, | |
| "learning_rate": 3.506420191112023e-06, | |
| "loss": 0.1182, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.922680412371134, | |
| "grad_norm": 0.6919148043225545, | |
| "learning_rate": 3.477784269130691e-06, | |
| "loss": 0.07, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.9278350515463918, | |
| "grad_norm": 0.9093312963775791, | |
| "learning_rate": 3.449203272301362e-06, | |
| "loss": 0.0967, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.9329896907216495, | |
| "grad_norm": 0.8167616489115295, | |
| "learning_rate": 3.4206782318941556e-06, | |
| "loss": 0.0586, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.9381443298969072, | |
| "grad_norm": 0.8922256610004783, | |
| "learning_rate": 3.3922101771601475e-06, | |
| "loss": 0.0716, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.943298969072165, | |
| "grad_norm": 1.0215655504364405, | |
| "learning_rate": 3.363800135294236e-06, | |
| "loss": 0.1191, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.9484536082474226, | |
| "grad_norm": 1.3202130028849257, | |
| "learning_rate": 3.3354491313980774e-06, | |
| "loss": 0.1322, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.9536082474226806, | |
| "grad_norm": 1.0653863680738789, | |
| "learning_rate": 3.3071581884431014e-06, | |
| "loss": 0.1362, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.9587628865979383, | |
| "grad_norm": 0.7846807384773636, | |
| "learning_rate": 3.2789283272335935e-06, | |
| "loss": 0.0823, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.963917525773196, | |
| "grad_norm": 1.005891449118457, | |
| "learning_rate": 3.250760566369864e-06, | |
| "loss": 0.1152, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.9690721649484537, | |
| "grad_norm": 0.8386229662220965, | |
| "learning_rate": 3.2226559222114974e-06, | |
| "loss": 0.0743, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.9742268041237114, | |
| "grad_norm": 0.8015266394615227, | |
| "learning_rate": 3.194615408840678e-06, | |
| "loss": 0.0818, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.9793814432989691, | |
| "grad_norm": 1.1193048769312743, | |
| "learning_rate": 3.1666400380255944e-06, | |
| "loss": 0.156, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.9845360824742269, | |
| "grad_norm": 0.8507066467744703, | |
| "learning_rate": 3.1387308191839495e-06, | |
| "loss": 0.0967, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.9896907216494846, | |
| "grad_norm": 0.6565071516482492, | |
| "learning_rate": 3.110888759346512e-06, | |
| "loss": 0.0469, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.9948453608247423, | |
| "grad_norm": 0.7510076309546407, | |
| "learning_rate": 3.0831148631208043e-06, | |
| "loss": 0.0607, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.9275588340131792, | |
| "learning_rate": 3.055410132654838e-06, | |
| "loss": 0.0939, | |
| "step": 388 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 582, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 128421150621696.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
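
The object above has the layout of a Hugging Face `Trainer` checkpoint's `trainer_state.json`: a `log_history` list of per-step records (epoch, grad_norm, learning_rate, loss, step) followed by run-level metadata. Note that `global_step` (388) is still below `max_steps` (582) with `num_train_epochs` set to 3, so this state describes a run checkpointed at the end of its second epoch rather than a finished run. As a minimal sketch of how such a file might be inspected, assuming it is saved locally as `trainer_state.json` (the path and the 20-record averaging window below are illustrative, not taken from the run itself):

```python
import json
from statistics import mean

# Load the serialized trainer state (the filename is an assumption for this sketch).
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Each log_history entry holds per-step metrics; keep only records that logged a loss.
history = state["log_history"]
steps = [rec["step"] for rec in history if "loss" in rec]
losses = [rec["loss"] for rec in history if "loss" in rec]
lrs = [rec["learning_rate"] for rec in history if "learning_rate" in rec]

print(f"logged steps: {steps[0]}..{steps[-1]} ({len(steps)} records)")
print(f"final loss: {losses[-1]:.4f}  (mean of last 20: {mean(losses[-20:]):.4f})")
print(f"learning-rate range: {min(lrs):.3e} .. {max(lrs):.3e}")
print(f"progress: step {state['global_step']} of {state['max_steps']}, "
      f"epoch {state['epoch']} of {state['num_train_epochs']}")
```

The sketch uses only the standard library; the same `steps`/`losses` lists could equally be handed to a plotting library to visualize the loss curve and learning-rate schedule.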