{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9911678115799804,
  "eval_steps": 500,
  "global_step": 101,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009813542688910697,
      "grad_norm": 2.8672487474187687,
      "learning_rate": 2.5e-06,
      "loss": 2.1544,
      "step": 1
    },
    {
      "epoch": 0.019627085377821395,
      "grad_norm": 2.754588790109863,
      "learning_rate": 5e-06,
      "loss": 2.142,
      "step": 2
    },
    {
      "epoch": 0.029440628066732092,
      "grad_norm": 2.590669275853557,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.9074,
      "step": 3
    },
    {
      "epoch": 0.03925417075564279,
      "grad_norm": 1.912791537410904,
      "learning_rate": 1e-05,
      "loss": 1.733,
      "step": 4
    },
    {
      "epoch": 0.04906771344455348,
      "grad_norm": 1.2960446152840717,
      "learning_rate": 9.997377845227577e-06,
      "loss": 1.4053,
      "step": 5
    },
    {
      "epoch": 0.058881256133464184,
      "grad_norm": 1.5094470155263513,
      "learning_rate": 9.98951413118856e-06,
      "loss": 1.24,
      "step": 6
    },
    {
      "epoch": 0.06869479882237488,
      "grad_norm": 1.4729523990902567,
      "learning_rate": 9.97641710583307e-06,
      "loss": 1.1846,
      "step": 7
    },
    {
      "epoch": 0.07850834151128558,
      "grad_norm": 0.6192450033938127,
      "learning_rate": 9.958100506132127e-06,
      "loss": 1.2066,
      "step": 8
    },
    {
      "epoch": 0.08832188420019627,
      "grad_norm": 0.5248188615161075,
      "learning_rate": 9.934583543669454e-06,
      "loss": 0.9077,
      "step": 9
    },
    {
      "epoch": 0.09813542688910697,
      "grad_norm": 0.4652794249979759,
      "learning_rate": 9.905890884491196e-06,
      "loss": 0.8559,
      "step": 10
    },
    {
      "epoch": 0.10794896957801767,
      "grad_norm": 0.524548716007966,
      "learning_rate": 9.872052623234632e-06,
      "loss": 0.9704,
      "step": 11
    },
    {
      "epoch": 0.11776251226692837,
      "grad_norm": 0.5314194335904315,
      "learning_rate": 9.833104251563058e-06,
      "loss": 0.9223,
      "step": 12
    },
    {
      "epoch": 0.12757605495583907,
      "grad_norm": 0.3047635309199024,
      "learning_rate": 9.789086620939936e-06,
      "loss": 0.9302,
      "step": 13
    },
    {
      "epoch": 0.13738959764474976,
      "grad_norm": 0.3819529820066455,
      "learning_rate": 9.740045899781353e-06,
      "loss": 0.8211,
      "step": 14
    },
    {
      "epoch": 0.14720314033366044,
      "grad_norm": 0.39909585000271475,
      "learning_rate": 9.68603352503172e-06,
      "loss": 0.8913,
      "step": 15
    },
    {
      "epoch": 0.15701668302257116,
      "grad_norm": 0.3479302056108582,
      "learning_rate": 9.627106148213521e-06,
      "loss": 0.9723,
      "step": 16
    },
    {
      "epoch": 0.16683022571148184,
      "grad_norm": 0.3766959942077643,
      "learning_rate": 9.563325576007702e-06,
      "loss": 0.8522,
      "step": 17
    },
    {
      "epoch": 0.17664376840039253,
      "grad_norm": 0.2857198795401394,
      "learning_rate": 9.494758705426978e-06,
      "loss": 0.9633,
      "step": 18
    },
    {
      "epoch": 0.18645731108930325,
      "grad_norm": 0.2993174163090425,
      "learning_rate": 9.421477453650118e-06,
      "loss": 0.8605,
      "step": 19
    },
    {
      "epoch": 0.19627085377821393,
      "grad_norm": 0.3107401663608036,
      "learning_rate": 9.343558682590757e-06,
      "loss": 0.8296,
      "step": 20
    },
    {
      "epoch": 0.20608439646712462,
      "grad_norm": 0.3270368688125023,
      "learning_rate": 9.261084118279846e-06,
      "loss": 0.8632,
      "step": 21
    },
    {
      "epoch": 0.21589793915603533,
      "grad_norm": 0.32890526306764684,
      "learning_rate": 9.174140265146356e-06,
      "loss": 0.954,
      "step": 22
    },
    {
      "epoch": 0.22571148184494602,
      "grad_norm": 0.2208062222434778,
      "learning_rate": 9.082818315286054e-06,
      "loss": 0.7973,
      "step": 23
    },
    {
      "epoch": 0.23552502453385674,
      "grad_norm": 0.23994712657664863,
      "learning_rate": 8.987214052813605e-06,
      "loss": 0.8003,
      "step": 24
    },
    {
      "epoch": 0.24533856722276742,
      "grad_norm": 0.3579410121713744,
      "learning_rate": 8.887427753398249e-06,
      "loss": 0.9163,
      "step": 25
    },
    {
      "epoch": 0.25515210991167814,
      "grad_norm": 0.25858791865785286,
      "learning_rate": 8.783564079088478e-06,
      "loss": 0.9203,
      "step": 26
    },
    {
      "epoch": 0.2649656526005888,
      "grad_norm": 0.2221965052804783,
      "learning_rate": 8.675731968536004e-06,
      "loss": 0.7986,
      "step": 27
    },
    {
      "epoch": 0.2747791952894995,
      "grad_norm": 0.25869875995203495,
      "learning_rate": 8.564044522734147e-06,
      "loss": 0.8584,
      "step": 28
    },
    {
      "epoch": 0.2845927379784102,
      "grad_norm": 0.25639549077125273,
      "learning_rate": 8.448618886390523e-06,
      "loss": 0.9153,
      "step": 29
    },
    {
      "epoch": 0.2944062806673209,
      "grad_norm": 0.28969420637722326,
      "learning_rate": 8.329576125058406e-06,
      "loss": 1.0017,
      "step": 30
    },
    {
      "epoch": 0.3042198233562316,
      "grad_norm": 0.27142079291709187,
      "learning_rate": 8.207041098155701e-06,
      "loss": 0.8834,
      "step": 31
    },
    {
      "epoch": 0.3140333660451423,
      "grad_norm": 0.24855994809239515,
      "learning_rate": 8.081142328004638e-06,
      "loss": 0.8804,
      "step": 32
    },
    {
      "epoch": 0.323846908734053,
      "grad_norm": 0.2932484533076321,
      "learning_rate": 7.952011865029614e-06,
      "loss": 0.8055,
      "step": 33
    },
    {
      "epoch": 0.3336604514229637,
      "grad_norm": 0.2899949936434053,
      "learning_rate": 7.819785149254534e-06,
      "loss": 0.7479,
      "step": 34
    },
    {
      "epoch": 0.3434739941118744,
      "grad_norm": 0.23167648356664933,
      "learning_rate": 7.68460086824492e-06,
      "loss": 0.8886,
      "step": 35
    },
    {
      "epoch": 0.35328753680078506,
      "grad_norm": 0.2640358818005348,
      "learning_rate": 7.546600811643816e-06,
      "loss": 0.9551,
      "step": 36
    },
    {
      "epoch": 0.3631010794896958,
      "grad_norm": 0.2595883854771292,
      "learning_rate": 7.405929722454026e-06,
      "loss": 0.9107,
      "step": 37
    },
    {
      "epoch": 0.3729146221786065,
      "grad_norm": 0.2471525297780812,
      "learning_rate": 7.262735145222696e-06,
      "loss": 0.8207,
      "step": 38
    },
    {
      "epoch": 0.38272816486751715,
      "grad_norm": 0.2476329062250312,
      "learning_rate": 7.117167271287453e-06,
      "loss": 0.854,
      "step": 39
    },
    {
      "epoch": 0.39254170755642787,
      "grad_norm": 0.26094540269164535,
      "learning_rate": 6.969378781246436e-06,
      "loss": 0.876,
      "step": 40
    },
    {
      "epoch": 0.4023552502453386,
      "grad_norm": 0.26106316938836766,
      "learning_rate": 6.819524684817439e-06,
      "loss": 0.8629,
      "step": 41
    },
    {
      "epoch": 0.41216879293424924,
      "grad_norm": 0.23060264618437654,
      "learning_rate": 6.667762158254104e-06,
      "loss": 0.8538,
      "step": 42
    },
    {
      "epoch": 0.42198233562315995,
      "grad_norm": 0.24185186221890845,
      "learning_rate": 6.514250379489754e-06,
      "loss": 0.9191,
      "step": 43
    },
    {
      "epoch": 0.43179587831207067,
      "grad_norm": 0.2393529024626251,
      "learning_rate": 6.3591503611817155e-06,
      "loss": 0.8808,
      "step": 44
    },
    {
      "epoch": 0.44160942100098133,
      "grad_norm": 0.2605127572935315,
      "learning_rate": 6.202624781831269e-06,
      "loss": 0.8904,
      "step": 45
    },
    {
      "epoch": 0.45142296368989204,
      "grad_norm": 0.2480445894592462,
      "learning_rate": 6.044837815156377e-06,
      "loss": 0.8717,
      "step": 46
    },
    {
      "epoch": 0.46123650637880276,
      "grad_norm": 0.2598210274633706,
      "learning_rate": 5.885954957896115e-06,
      "loss": 0.8659,
      "step": 47
    },
    {
      "epoch": 0.47105004906771347,
      "grad_norm": 0.25886899490619003,
      "learning_rate": 5.726142856227453e-06,
      "loss": 0.9661,
      "step": 48
    },
    {
      "epoch": 0.48086359175662413,
      "grad_norm": 0.27445286917757566,
      "learning_rate": 5.5655691309764225e-06,
      "loss": 0.7741,
      "step": 49
    },
    {
      "epoch": 0.49067713444553485,
      "grad_norm": 0.2998354383728768,
      "learning_rate": 5.404402201807022e-06,
      "loss": 0.7016,
      "step": 50
    },
    {
      "epoch": 0.5004906771344455,
      "grad_norm": 0.30281526320811003,
      "learning_rate": 5.242811110572243e-06,
      "loss": 0.8998,
      "step": 51
    },
    {
      "epoch": 0.5103042198233563,
      "grad_norm": 0.23291295344605198,
      "learning_rate": 5.080965344012509e-06,
      "loss": 0.8599,
      "step": 52
    },
    {
      "epoch": 0.5201177625122669,
      "grad_norm": 0.2564082609308688,
      "learning_rate": 4.919034655987493e-06,
      "loss": 0.8412,
      "step": 53
    },
    {
      "epoch": 0.5299313052011776,
      "grad_norm": 0.27404998782840256,
      "learning_rate": 4.757188889427761e-06,
      "loss": 0.8401,
      "step": 54
    },
    {
      "epoch": 0.5397448478900884,
      "grad_norm": 0.25818007029770623,
      "learning_rate": 4.59559779819298e-06,
      "loss": 0.9063,
      "step": 55
    },
    {
      "epoch": 0.549558390578999,
      "grad_norm": 0.26623848076266293,
      "learning_rate": 4.434430869023579e-06,
      "loss": 0.7651,
      "step": 56
    },
    {
      "epoch": 0.5593719332679097,
      "grad_norm": 0.2658949551049741,
      "learning_rate": 4.27385714377255e-06,
      "loss": 0.9671,
      "step": 57
    },
    {
      "epoch": 0.5691854759568205,
      "grad_norm": 0.2954557630383791,
      "learning_rate": 4.1140450421038865e-06,
      "loss": 0.8364,
      "step": 58
    },
    {
      "epoch": 0.5789990186457311,
      "grad_norm": 0.24989225651779404,
      "learning_rate": 3.955162184843625e-06,
      "loss": 0.8642,
      "step": 59
    },
    {
      "epoch": 0.5888125613346418,
      "grad_norm": 0.2540070747708594,
      "learning_rate": 3.7973752181687336e-06,
      "loss": 0.9487,
      "step": 60
    },
    {
      "epoch": 0.5986261040235525,
      "grad_norm": 0.2875487613543649,
      "learning_rate": 3.6408496388182857e-06,
      "loss": 0.8931,
      "step": 61
    },
    {
      "epoch": 0.6084396467124632,
      "grad_norm": 0.25257746440863926,
      "learning_rate": 3.4857496205102475e-06,
      "loss": 0.7283,
      "step": 62
    },
    {
      "epoch": 0.6182531894013739,
      "grad_norm": 0.2612423385739647,
      "learning_rate": 3.3322378417458985e-06,
      "loss": 0.8973,
      "step": 63
    },
    {
      "epoch": 0.6280667320902846,
      "grad_norm": 0.2863739428894319,
      "learning_rate": 3.180475315182563e-06,
      "loss": 0.8244,
      "step": 64
    },
    {
      "epoch": 0.6378802747791953,
      "grad_norm": 0.25895617346448274,
      "learning_rate": 3.0306212187535653e-06,
      "loss": 0.8444,
      "step": 65
    },
    {
      "epoch": 0.647693817468106,
      "grad_norm": 0.25861219708555827,
      "learning_rate": 2.882832728712551e-06,
      "loss": 0.9331,
      "step": 66
    },
    {
      "epoch": 0.6575073601570167,
      "grad_norm": 0.259266449527201,
      "learning_rate": 2.7372648547773063e-06,
      "loss": 0.9111,
      "step": 67
    },
    {
      "epoch": 0.6673209028459274,
      "grad_norm": 0.2536619784605648,
      "learning_rate": 2.594070277545975e-06,
      "loss": 0.8717,
      "step": 68
    },
    {
      "epoch": 0.677134445534838,
      "grad_norm": 0.26066935129753177,
      "learning_rate": 2.4533991883561868e-06,
      "loss": 0.7797,
      "step": 69
    },
    {
      "epoch": 0.6869479882237488,
      "grad_norm": 0.2517815528639677,
      "learning_rate": 2.315399131755081e-06,
      "loss": 0.8631,
      "step": 70
    },
    {
      "epoch": 0.6967615309126595,
      "grad_norm": 0.25996941562937426,
      "learning_rate": 2.1802148507454675e-06,
      "loss": 0.8719,
      "step": 71
    },
    {
      "epoch": 0.7065750736015701,
      "grad_norm": 0.24588266413337945,
      "learning_rate": 2.0479881349703885e-06,
      "loss": 0.9193,
      "step": 72
    },
    {
      "epoch": 0.7163886162904809,
      "grad_norm": 0.2745377438843979,
      "learning_rate": 1.9188576719953635e-06,
      "loss": 0.861,
      "step": 73
    },
    {
      "epoch": 0.7262021589793916,
      "grad_norm": 0.22640992501852827,
      "learning_rate": 1.7929589018443016e-06,
      "loss": 0.7663,
      "step": 74
    },
    {
      "epoch": 0.7360157016683022,
      "grad_norm": 0.24363259853967661,
      "learning_rate": 1.6704238749415958e-06,
      "loss": 0.7472,
      "step": 75
    },
    {
      "epoch": 0.745829244357213,
      "grad_norm": 0.2777670891337088,
      "learning_rate": 1.5513811136094786e-06,
      "loss": 0.9389,
      "step": 76
    },
    {
      "epoch": 0.7556427870461236,
      "grad_norm": 0.26198217510926863,
      "learning_rate": 1.4359554772658551e-06,
      "loss": 0.8868,
      "step": 77
    },
    {
      "epoch": 0.7654563297350343,
      "grad_norm": 0.25420697034959644,
      "learning_rate": 1.3242680314639995e-06,
      "loss": 0.7417,
      "step": 78
    },
    {
      "epoch": 0.7752698724239451,
      "grad_norm": 0.27383288657704047,
      "learning_rate": 1.2164359209115235e-06,
      "loss": 0.7767,
      "step": 79
    },
    {
      "epoch": 0.7850834151128557,
      "grad_norm": 0.26155693194049967,
      "learning_rate": 1.1125722466017547e-06,
      "loss": 0.8705,
      "step": 80
    },
    {
      "epoch": 0.7948969578017664,
      "grad_norm": 0.2892989585599706,
      "learning_rate": 1.012785947186397e-06,
      "loss": 0.8472,
      "step": 81
    },
    {
      "epoch": 0.8047105004906772,
      "grad_norm": 0.257453864175166,
      "learning_rate": 9.171816847139447e-07,
      "loss": 0.9102,
      "step": 82
    },
    {
      "epoch": 0.8145240431795878,
      "grad_norm": 0.25933961134109923,
      "learning_rate": 8.258597348536452e-07,
      "loss": 0.8377,
      "step": 83
    },
    {
      "epoch": 0.8243375858684985,
      "grad_norm": 0.2504962150041825,
      "learning_rate": 7.389158817201541e-07,
      "loss": 0.8331,
      "step": 84
    },
    {
      "epoch": 0.8341511285574092,
      "grad_norm": 0.2420056571684986,
      "learning_rate": 6.564413174092443e-07,
      "loss": 0.8771,
      "step": 85
    },
    {
      "epoch": 0.8439646712463199,
      "grad_norm": 0.23509290212940573,
      "learning_rate": 5.785225463498828e-07,
      "loss": 0.8197,
      "step": 86
    },
    {
      "epoch": 0.8537782139352306,
      "grad_norm": 0.26997192628106204,
      "learning_rate": 5.05241294573024e-07,
      "loss": 0.8369,
      "step": 87
    },
    {
      "epoch": 0.8635917566241413,
      "grad_norm": 0.2820942983849838,
      "learning_rate": 4.3667442399229985e-07,
      "loss": 0.7722,
      "step": 88
    },
    {
      "epoch": 0.873405299313052,
      "grad_norm": 0.2516407661322018,
      "learning_rate": 3.728938517864794e-07,
      "loss": 0.8593,
      "step": 89
    },
    {
      "epoch": 0.8832188420019627,
      "grad_norm": 0.26231652652086895,
      "learning_rate": 3.1396647496828245e-07,
      "loss": 0.8056,
      "step": 90
    },
    {
      "epoch": 0.8930323846908734,
      "grad_norm": 0.27028297041866356,
      "learning_rate": 2.599541002186479e-07,
      "loss": 0.7567,
      "step": 91
    },
    {
      "epoch": 0.9028459273797841,
      "grad_norm": 0.23972498640175965,
      "learning_rate": 2.109133790600648e-07,
      "loss": 0.8646,
      "step": 92
    },
    {
      "epoch": 0.9126594700686947,
      "grad_norm": 0.25770792802506376,
      "learning_rate": 1.6689574843694433e-07,
      "loss": 0.8596,
      "step": 93
    },
    {
      "epoch": 0.9224730127576055,
      "grad_norm": 0.25190515551071735,
      "learning_rate": 1.2794737676536993e-07,
      "loss": 0.711,
      "step": 94
    },
    {
      "epoch": 0.9322865554465162,
      "grad_norm": 0.24771560518251803,
      "learning_rate": 9.410911550880474e-08,
      "loss": 0.8539,
      "step": 95
    },
    {
      "epoch": 0.9421000981354269,
      "grad_norm": 0.2534003058388717,
      "learning_rate": 6.54164563305465e-08,
      "loss": 0.8507,
      "step": 96
    },
    {
      "epoch": 0.9519136408243376,
      "grad_norm": 0.27958663472352846,
      "learning_rate": 4.189949386787462e-08,
      "loss": 0.7761,
      "step": 97
    },
    {
      "epoch": 0.9617271835132483,
      "grad_norm": 0.28280544357692566,
      "learning_rate": 2.358289416693027e-08,
      "loss": 0.8915,
      "step": 98
    },
    {
      "epoch": 0.971540726202159,
      "grad_norm": 0.251854611552598,
      "learning_rate": 1.0485868811441757e-08,
      "loss": 0.9458,
      "step": 99
    },
    {
      "epoch": 0.9813542688910697,
      "grad_norm": 0.2651175828136124,
      "learning_rate": 2.6221547724253337e-09,
      "loss": 0.7782,
      "step": 100
    },
    {
      "epoch": 0.9911678115799804,
      "grad_norm": 0.262045684118293,
      "learning_rate": 0.0,
      "loss": 0.7791,
      "step": 101
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 101,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 412567592239104.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}