| { |
| "best_metric": 4.33919095993042, |
| "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/full2/lstm/1/checkpoints/checkpoint-228960", |
| "epoch": 0.025000606015738065, |
| "eval_steps": 10, |
| "global_step": 228960, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999998362119627e-05, |
| "loss": 10.8218, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.999161405248948e-05, |
| "loss": 7.555, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.998322810497896e-05, |
| "loss": 7.0563, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.997484215746844e-05, |
| "loss": 6.9931, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.996645620995792e-05, |
| "loss": 6.9482, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99580702624474e-05, |
| "loss": 6.917, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994968431493688e-05, |
| "loss": 6.7448, |
| "step": 3072 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.994129836742636e-05, |
| "loss": 6.6294, |
| "step": 3584 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.993291241991584e-05, |
| "loss": 6.5261, |
| "step": 4096 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.992452647240532e-05, |
| "loss": 6.454, |
| "step": 4608 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99161405248948e-05, |
| "loss": 6.4008, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.990775457738428e-05, |
| "loss": 6.3509, |
| "step": 5632 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989936862987376e-05, |
| "loss": 6.2952, |
| "step": 6144 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.989098268236324e-05, |
| "loss": 6.2253, |
| "step": 6656 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.988259673485272e-05, |
| "loss": 6.1744, |
| "step": 7168 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.98742107873422e-05, |
| "loss": 6.1157, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.986582483983168e-05, |
| "loss": 6.0711, |
| "step": 8192 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.985743889232116e-05, |
| "loss": 6.0243, |
| "step": 8704 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984905294481064e-05, |
| "loss": 5.9811, |
| "step": 9216 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.984066699730012e-05, |
| "loss": 5.9349, |
| "step": 9728 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.983229742859333e-05, |
| "loss": 5.9024, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.982391148108281e-05, |
| "loss": 5.8638, |
| "step": 10752 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.981552553357229e-05, |
| "loss": 5.8368, |
| "step": 11264 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.980713958606178e-05, |
| "loss": 5.7953, |
| "step": 11776 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9798770017354986e-05, |
| "loss": 5.7841, |
| "step": 12288 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9790384069844466e-05, |
| "loss": 5.752, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9781998122333946e-05, |
| "loss": 5.7243, |
| "step": 13312 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9773612174823426e-05, |
| "loss": 5.692, |
| "step": 13824 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9765242606116635e-05, |
| "loss": 5.6711, |
| "step": 14336 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9756856658606115e-05, |
| "loss": 5.6565, |
| "step": 14848 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9748470711095595e-05, |
| "loss": 5.6248, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.974008476358507e-05, |
| "loss": 5.6156, |
| "step": 15872 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9731715194878284e-05, |
| "loss": 5.5887, |
| "step": 16384 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9723329247367764e-05, |
| "loss": 5.5708, |
| "step": 16896 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9714943299857244e-05, |
| "loss": 5.5686, |
| "step": 17408 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9706557352346724e-05, |
| "loss": 5.55, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9698171404836204e-05, |
| "loss": 5.5264, |
| "step": 18432 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9689785457325684e-05, |
| "loss": 5.4982, |
| "step": 18944 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96814158886189e-05, |
| "loss": 5.4893, |
| "step": 19456 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.967302994110837e-05, |
| "loss": 5.4643, |
| "step": 19968 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.966464399359785e-05, |
| "loss": 5.4617, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.965625804608733e-05, |
| "loss": 5.4387, |
| "step": 20992 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.964787209857681e-05, |
| "loss": 5.432, |
| "step": 21504 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.963948615106629e-05, |
| "loss": 5.4066, |
| "step": 22016 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.96311165823595e-05, |
| "loss": 5.4089, |
| "step": 22528 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.962273063484898e-05, |
| "loss": 5.3943, |
| "step": 23040 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.961434468733847e-05, |
| "loss": 5.3947, |
| "step": 23552 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.960595873982795e-05, |
| "loss": 5.381, |
| "step": 24064 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.959757279231743e-05, |
| "loss": 5.3524, |
| "step": 24576 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958920322361064e-05, |
| "loss": 5.3443, |
| "step": 25088 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.958081727610012e-05, |
| "loss": 5.3531, |
| "step": 25600 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95724313285896e-05, |
| "loss": 5.3329, |
| "step": 26112 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.956404538107908e-05, |
| "loss": 5.3196, |
| "step": 26624 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9555692191176016e-05, |
| "loss": 5.3008, |
| "step": 27136 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9547306243665496e-05, |
| "loss": 5.304, |
| "step": 27648 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9538920296154976e-05, |
| "loss": 5.2846, |
| "step": 28160 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9530534348644456e-05, |
| "loss": 5.2954, |
| "step": 28672 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9522148401133936e-05, |
| "loss": 5.2596, |
| "step": 29184 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.951376245362342e-05, |
| "loss": 5.2624, |
| "step": 29696 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.95053765061129e-05, |
| "loss": 5.2577, |
| "step": 30208 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.949699055860238e-05, |
| "loss": 5.2416, |
| "step": 30720 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948860461109186e-05, |
| "loss": 5.2481, |
| "step": 31232 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.948021866358134e-05, |
| "loss": 5.2192, |
| "step": 31744 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.947186547367828e-05, |
| "loss": 5.2145, |
| "step": 32256 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.946347952616776e-05, |
| "loss": 5.2026, |
| "step": 32768 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.945509357865724e-05, |
| "loss": 5.2226, |
| "step": 33280 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.944670763114672e-05, |
| "loss": 5.1883, |
| "step": 33792 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.94383216836362e-05, |
| "loss": 5.1898, |
| "step": 34304 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942993573612568e-05, |
| "loss": 5.1727, |
| "step": 34816 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.942154978861516e-05, |
| "loss": 5.1665, |
| "step": 35328 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.941316384110464e-05, |
| "loss": 5.1752, |
| "step": 35840 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.940477789359412e-05, |
| "loss": 5.1664, |
| "step": 36352 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9396408324887336e-05, |
| "loss": 5.1582, |
| "step": 36864 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9388038756180545e-05, |
| "loss": 5.157, |
| "step": 37376 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9379652808670025e-05, |
| "loss": 5.1584, |
| "step": 37888 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9371266861159505e-05, |
| "loss": 5.1269, |
| "step": 38400 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9362880913648985e-05, |
| "loss": 5.1356, |
| "step": 38912 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9354511344942194e-05, |
| "loss": 5.1125, |
| "step": 39424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9346125397431674e-05, |
| "loss": 5.1064, |
| "step": 39936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9337739449921154e-05, |
| "loss": 5.1086, |
| "step": 40448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9329353502410634e-05, |
| "loss": 5.1081, |
| "step": 40960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9320967554900114e-05, |
| "loss": 5.0958, |
| "step": 41472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9312581607389594e-05, |
| "loss": 5.1051, |
| "step": 41984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.9304195659879074e-05, |
| "loss": 5.0851, |
| "step": 42496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.929582609117229e-05, |
| "loss": 5.0683, |
| "step": 43008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.928744014366177e-05, |
| "loss": 5.0626, |
| "step": 43520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927905419615125e-05, |
| "loss": 5.076, |
| "step": 44032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.927066824864073e-05, |
| "loss": 5.0564, |
| "step": 44544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.92622823011302e-05, |
| "loss": 5.055, |
| "step": 45056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.925389635361968e-05, |
| "loss": 5.0504, |
| "step": 45568 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.924551040610916e-05, |
| "loss": 5.0379, |
| "step": 46080 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.923712445859864e-05, |
| "loss": 5.0403, |
| "step": 46592 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922875488989185e-05, |
| "loss": 5.033, |
| "step": 47104 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.922038532118507e-05, |
| "loss": 5.032, |
| "step": 47616 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.921199937367455e-05, |
| "loss": 5.0306, |
| "step": 48128 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.920361342616403e-05, |
| "loss": 5.0205, |
| "step": 48640 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.919522747865351e-05, |
| "loss": 5.0025, |
| "step": 49152 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.918684153114299e-05, |
| "loss": 5.005, |
| "step": 49664 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917845558363247e-05, |
| "loss": 4.9933, |
| "step": 50176 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.917006963612195e-05, |
| "loss": 4.9917, |
| "step": 50688 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.916168368861143e-05, |
| "loss": 5.0004, |
| "step": 51200 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9153314119904636e-05, |
| "loss": 4.9791, |
| "step": 51712 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9144928172394116e-05, |
| "loss": 4.9824, |
| "step": 52224 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9136542224883596e-05, |
| "loss": 4.977, |
| "step": 52736 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9128156277373076e-05, |
| "loss": 4.9655, |
| "step": 53248 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9119786708666285e-05, |
| "loss": 4.9573, |
| "step": 53760 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9111400761155765e-05, |
| "loss": 4.9581, |
| "step": 54272 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.910303119244898e-05, |
| "loss": 4.9406, |
| "step": 54784 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.909464524493846e-05, |
| "loss": 4.943, |
| "step": 55296 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.908625929742794e-05, |
| "loss": 4.9403, |
| "step": 55808 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.907787334991742e-05, |
| "loss": 4.9402, |
| "step": 56320 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.90694874024069e-05, |
| "loss": 4.9318, |
| "step": 56832 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.906110145489638e-05, |
| "loss": 4.9289, |
| "step": 57344 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.905271550738586e-05, |
| "loss": 4.9204, |
| "step": 57856 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.904432955987534e-05, |
| "loss": 4.923, |
| "step": 58368 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.903594361236482e-05, |
| "loss": 4.9246, |
| "step": 58880 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.902757404365803e-05, |
| "loss": 4.9166, |
| "step": 59392 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901918809614751e-05, |
| "loss": 4.9164, |
| "step": 59904 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.901080214863699e-05, |
| "loss": 4.9039, |
| "step": 60416 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.900241620112647e-05, |
| "loss": 4.9077, |
| "step": 60928 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8994046632419686e-05, |
| "loss": 4.9036, |
| "step": 61440 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8985660684909166e-05, |
| "loss": 4.8881, |
| "step": 61952 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8977274737398646e-05, |
| "loss": 4.8869, |
| "step": 62464 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8968888789888125e-05, |
| "loss": 4.8841, |
| "step": 62976 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8960519221181335e-05, |
| "loss": 4.8836, |
| "step": 63488 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8952133273670815e-05, |
| "loss": 4.8711, |
| "step": 64000 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8943747326160294e-05, |
| "loss": 4.8782, |
| "step": 64512 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8935361378649774e-05, |
| "loss": 4.8687, |
| "step": 65024 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8926991809942984e-05, |
| "loss": 4.8628, |
| "step": 65536 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8918605862432463e-05, |
| "loss": 4.8554, |
| "step": 66048 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8910219914921943e-05, |
| "loss": 4.8731, |
| "step": 66560 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.8901833967411423e-05, |
| "loss": 4.8627, |
| "step": 67072 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88934480199009e-05, |
| "loss": 4.8531, |
| "step": 67584 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.888506207239038e-05, |
| "loss": 4.8518, |
| "step": 68096 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.887667612487986e-05, |
| "loss": 4.8503, |
| "step": 68608 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.886829017736934e-05, |
| "loss": 4.8473, |
| "step": 69120 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.885990422985882e-05, |
| "loss": 4.8495, |
| "step": 69632 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.88515182823483e-05, |
| "loss": 4.8407, |
| "step": 70144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.884314871364151e-05, |
| "loss": 4.8344, |
| "step": 70656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.883476276613099e-05, |
| "loss": 4.8315, |
| "step": 71168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.882639319742421e-05, |
| "loss": 4.8201, |
| "step": 71680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.881800724991369e-05, |
| "loss": 4.8154, |
| "step": 72192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880962130240316e-05, |
| "loss": 4.817, |
| "step": 72704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.880123535489264e-05, |
| "loss": 4.8199, |
| "step": 73216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.879284940738212e-05, |
| "loss": 4.8122, |
| "step": 73728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.87844634598716e-05, |
| "loss": 4.8117, |
| "step": 74240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.877607751236109e-05, |
| "loss": 4.808, |
| "step": 74752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.876769156485057e-05, |
| "loss": 4.8072, |
| "step": 75264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875930561734005e-05, |
| "loss": 4.8127, |
| "step": 75776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.875093604863326e-05, |
| "loss": 4.7908, |
| "step": 76288 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.759054660797119, |
| "eval_runtime": 295.275, |
| "eval_samples_per_second": 1292.324, |
| "eval_steps_per_second": 40.386, |
| "step": 76320 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.874255010112274e-05, |
| "loss": 4.7823, |
| "step": 76800 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.873416415361222e-05, |
| "loss": 4.7797, |
| "step": 77312 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8725794584905426e-05, |
| "loss": 4.7991, |
| "step": 77824 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8717408637394906e-05, |
| "loss": 4.7837, |
| "step": 78336 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8709022689884386e-05, |
| "loss": 4.788, |
| "step": 78848 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8700636742373866e-05, |
| "loss": 4.7796, |
| "step": 79360 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8692250794863346e-05, |
| "loss": 4.7754, |
| "step": 79872 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8683864847352826e-05, |
| "loss": 4.7615, |
| "step": 80384 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.867549527864604e-05, |
| "loss": 4.7659, |
| "step": 80896 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.866710933113552e-05, |
| "loss": 4.7631, |
| "step": 81408 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8658723383625e-05, |
| "loss": 4.7861, |
| "step": 81920 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.865033743611448e-05, |
| "loss": 4.7678, |
| "step": 82432 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.864195148860396e-05, |
| "loss": 4.7529, |
| "step": 82944 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.863356554109344e-05, |
| "loss": 4.7516, |
| "step": 83456 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.862517959358292e-05, |
| "loss": 4.7511, |
| "step": 83968 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.86167936460724e-05, |
| "loss": 4.7418, |
| "step": 84480 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8608407698561874e-05, |
| "loss": 4.7549, |
| "step": 84992 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.860003812985509e-05, |
| "loss": 4.7466, |
| "step": 85504 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.859165218234457e-05, |
| "loss": 4.7369, |
| "step": 86016 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.858328261363778e-05, |
| "loss": 4.7538, |
| "step": 86528 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.857489666612726e-05, |
| "loss": 4.7371, |
| "step": 87040 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.856651071861674e-05, |
| "loss": 4.7382, |
| "step": 87552 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8558124771106226e-05, |
| "loss": 4.7249, |
| "step": 88064 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8549755202399435e-05, |
| "loss": 4.7483, |
| "step": 88576 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8541369254888915e-05, |
| "loss": 4.7239, |
| "step": 89088 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8532983307378395e-05, |
| "loss": 4.7217, |
| "step": 89600 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.8524597359867875e-05, |
| "loss": 4.7154, |
| "step": 90112 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.851621141235735e-05, |
| "loss": 4.7162, |
| "step": 90624 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 4.850782546484683e-05, |
| "loss": 4.7125, |
| "step": 91136 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.849943951733631e-05, |
| "loss": 4.7088, |
| "step": 91648 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8491069948629524e-05, |
| "loss": 4.7152, |
| "step": 92160 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8482684001119e-05, |
| "loss": 4.7097, |
| "step": 92672 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.847431443241221e-05, |
| "loss": 4.7069, |
| "step": 93184 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.846592848490169e-05, |
| "loss": 4.7163, |
| "step": 93696 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.845754253739117e-05, |
| "loss": 4.7071, |
| "step": 94208 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844917296868439e-05, |
| "loss": 4.7057, |
| "step": 94720 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.844078702117387e-05, |
| "loss": 4.6878, |
| "step": 95232 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.843240107366335e-05, |
| "loss": 4.6928, |
| "step": 95744 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.842401512615282e-05, |
| "loss": 4.6826, |
| "step": 96256 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.84156291786423e-05, |
| "loss": 4.6916, |
| "step": 96768 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.840724323113178e-05, |
| "loss": 4.6798, |
| "step": 97280 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839885728362126e-05, |
| "loss": 4.6866, |
| "step": 97792 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.839047133611074e-05, |
| "loss": 4.6687, |
| "step": 98304 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.838208538860022e-05, |
| "loss": 4.6909, |
| "step": 98816 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83736994410897e-05, |
| "loss": 4.6784, |
| "step": 99328 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.836531349357918e-05, |
| "loss": 4.6819, |
| "step": 99840 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.835692754606866e-05, |
| "loss": 4.6793, |
| "step": 100352 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834855797736188e-05, |
| "loss": 4.6623, |
| "step": 100864 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.834017202985136e-05, |
| "loss": 4.6611, |
| "step": 101376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.833178608234084e-05, |
| "loss": 4.6767, |
| "step": 101888 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.832340013483032e-05, |
| "loss": 4.6632, |
| "step": 102400 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.83150141873198e-05, |
| "loss": 4.6617, |
| "step": 102912 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.830662823980928e-05, |
| "loss": 4.6504, |
| "step": 103424 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8298258671102486e-05, |
| "loss": 4.6587, |
| "step": 103936 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8289872723591966e-05, |
| "loss": 4.6429, |
| "step": 104448 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8281486776081446e-05, |
| "loss": 4.6688, |
| "step": 104960 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8273100828570926e-05, |
| "loss": 4.6327, |
| "step": 105472 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8264714881060406e-05, |
| "loss": 4.6522, |
| "step": 105984 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8256328933549886e-05, |
| "loss": 4.6479, |
| "step": 106496 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8247942986039366e-05, |
| "loss": 4.6396, |
| "step": 107008 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823957341733258e-05, |
| "loss": 4.6479, |
| "step": 107520 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.823118746982206e-05, |
| "loss": 4.635, |
| "step": 108032 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8222801522311535e-05, |
| "loss": 4.6286, |
| "step": 108544 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.821443195360475e-05, |
| "loss": 4.625, |
| "step": 109056 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.820604600609423e-05, |
| "loss": 4.6493, |
| "step": 109568 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.819766005858371e-05, |
| "loss": 4.6192, |
| "step": 110080 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8189274111073184e-05, |
| "loss": 4.6326, |
| "step": 110592 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8180888163562664e-05, |
| "loss": 4.613, |
| "step": 111104 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8172502216052144e-05, |
| "loss": 4.6208, |
| "step": 111616 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8164116268541624e-05, |
| "loss": 4.6307, |
| "step": 112128 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8155730321031104e-05, |
| "loss": 4.6296, |
| "step": 112640 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.814736075232432e-05, |
| "loss": 4.6206, |
| "step": 113152 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.81389748048138e-05, |
| "loss": 4.6297, |
| "step": 113664 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.813058885730328e-05, |
| "loss": 4.6306, |
| "step": 114176 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.812220290979276e-05, |
| "loss": 4.6133, |
| "step": 114688 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.811381696228224e-05, |
| "loss": 4.6219, |
| "step": 115200 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.810543101477172e-05, |
| "loss": 4.6101, |
| "step": 115712 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.809706144606493e-05, |
| "loss": 4.6086, |
| "step": 116224 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808867549855441e-05, |
| "loss": 4.6023, |
| "step": 116736 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.808028955104389e-05, |
| "loss": 4.6116, |
| "step": 117248 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.807190360353337e-05, |
| "loss": 4.6115, |
| "step": 117760 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.806351765602285e-05, |
| "loss": 4.619, |
| "step": 118272 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.805513170851233e-05, |
| "loss": 4.607, |
| "step": 118784 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.804674576100181e-05, |
| "loss": 4.5872, |
| "step": 119296 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.803837619229502e-05, |
| "loss": 4.5889, |
| "step": 119808 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8029990244784504e-05, |
| "loss": 4.602, |
| "step": 120320 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8021604297273984e-05, |
| "loss": 4.5961, |
| "step": 120832 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8013218349763464e-05, |
| "loss": 4.5926, |
| "step": 121344 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 4.8004832402252944e-05, |
| "loss": 4.5985, |
| "step": 121856 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7996446454742424e-05, |
| "loss": 4.5827, |
| "step": 122368 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79880605072319e-05, |
| "loss": 4.5884, |
| "step": 122880 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797969093852511e-05, |
| "loss": 4.5889, |
| "step": 123392 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.797130499101459e-05, |
| "loss": 4.594, |
| "step": 123904 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.796291904350407e-05, |
| "loss": 4.5915, |
| "step": 124416 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7954533095993546e-05, |
| "loss": 4.5898, |
| "step": 124928 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7946147148483026e-05, |
| "loss": 4.5713, |
| "step": 125440 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7937761200972506e-05, |
| "loss": 4.5792, |
| "step": 125952 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7929375253461986e-05, |
| "loss": 4.5759, |
| "step": 126464 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.79210056847552e-05, |
| "loss": 4.5745, |
| "step": 126976 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.791261973724468e-05, |
| "loss": 4.5898, |
| "step": 127488 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.790423378973416e-05, |
| "loss": 4.5676, |
| "step": 128000 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.789584784222364e-05, |
| "loss": 4.5728, |
| "step": 128512 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.788746189471312e-05, |
| "loss": 4.574, |
| "step": 129024 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.78790759472026e-05, |
| "loss": 4.5679, |
| "step": 129536 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.787068999969208e-05, |
| "loss": 4.5581, |
| "step": 130048 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.786230405218156e-05, |
| "loss": 4.5661, |
| "step": 130560 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.785393448347477e-05, |
| "loss": 4.5536, |
| "step": 131072 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.784554853596425e-05, |
| "loss": 4.5549, |
| "step": 131584 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.783716258845373e-05, |
| "loss": 4.5572, |
| "step": 132096 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782877664094321e-05, |
| "loss": 4.5549, |
| "step": 132608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.782040707223642e-05, |
| "loss": 4.5527, |
| "step": 133120 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7812021124725906e-05, |
| "loss": 4.5591, |
| "step": 133632 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7803635177215386e-05, |
| "loss": 4.5448, |
| "step": 134144 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7795249229704866e-05, |
| "loss": 4.5545, |
| "step": 134656 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7786879660998075e-05, |
| "loss": 4.5623, |
| "step": 135168 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7778493713487555e-05, |
| "loss": 4.5532, |
| "step": 135680 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7770107765977035e-05, |
| "loss": 4.5563, |
| "step": 136192 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7761721818466515e-05, |
| "loss": 4.5477, |
| "step": 136704 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7753335870955995e-05, |
| "loss": 4.5479, |
| "step": 137216 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7744966302249204e-05, |
| "loss": 4.5489, |
| "step": 137728 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7736580354738684e-05, |
| "loss": 4.5398, |
| "step": 138240 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7728194407228164e-05, |
| "loss": 4.5375, |
| "step": 138752 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7719808459717644e-05, |
| "loss": 4.5384, |
| "step": 139264 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.771143889101086e-05, |
| "loss": 4.5411, |
| "step": 139776 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.770305294350034e-05, |
| "loss": 4.5374, |
| "step": 140288 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.769466699598982e-05, |
| "loss": 4.5355, |
| "step": 140800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76862810484793e-05, |
| "loss": 4.5353, |
| "step": 141312 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.767789510096878e-05, |
| "loss": 4.5293, |
| "step": 141824 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766950915345826e-05, |
| "loss": 4.5225, |
| "step": 142336 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.766112320594773e-05, |
| "loss": 4.5467, |
| "step": 142848 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.765273725843721e-05, |
| "loss": 4.5381, |
| "step": 143360 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.764436768973043e-05, |
| "loss": 4.5345, |
| "step": 143872 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76359817422199e-05, |
| "loss": 4.53, |
| "step": 144384 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.762761217351312e-05, |
| "loss": 4.5295, |
| "step": 144896 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.76192262260026e-05, |
| "loss": 4.5286, |
| "step": 145408 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.761084027849208e-05, |
| "loss": 4.5395, |
| "step": 145920 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.760245433098156e-05, |
| "loss": 4.5302, |
| "step": 146432 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.759406838347104e-05, |
| "loss": 4.5217, |
| "step": 146944 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.758568243596052e-05, |
| "loss": 4.5221, |
| "step": 147456 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.757729648845e-05, |
| "loss": 4.5162, |
| "step": 147968 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.756891054093948e-05, |
| "loss": 4.5114, |
| "step": 148480 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7560540972232686e-05, |
| "loss": 4.518, |
| "step": 148992 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7552155024722166e-05, |
| "loss": 4.5204, |
| "step": 149504 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7543769077211646e-05, |
| "loss": 4.5098, |
| "step": 150016 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7535383129701126e-05, |
| "loss": 4.5209, |
| "step": 150528 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7526997182190606e-05, |
| "loss": 4.5171, |
| "step": 151040 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7518611234680086e-05, |
| "loss": 4.5127, |
| "step": 151552 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7510225287169566e-05, |
| "loss": 4.5163, |
| "step": 152064 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 4.7501839339659046e-05, |
| "loss": 4.503, |
| "step": 152576 |
| }, |
| { |
| "epoch": 1.03, |
| "eval_loss": 4.475676536560059, |
| "eval_runtime": 294.4035, |
| "eval_samples_per_second": 1296.15, |
| "eval_steps_per_second": 40.506, |
| "step": 152640 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7493453392148526e-05, |
| "loss": 4.4938, |
| "step": 153088 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.748506744463801e-05, |
| "loss": 4.4956, |
| "step": 153600 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.747668149712749e-05, |
| "loss": 4.5156, |
| "step": 154112 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.746829554961697e-05, |
| "loss": 4.5039, |
| "step": 154624 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7459909602106446e-05, |
| "loss": 4.5111, |
| "step": 155136 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7451523654595926e-05, |
| "loss": 4.4976, |
| "step": 155648 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7443137707085406e-05, |
| "loss": 4.5019, |
| "step": 156160 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7434751759574886e-05, |
| "loss": 4.4876, |
| "step": 156672 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7426365812064366e-05, |
| "loss": 4.4958, |
| "step": 157184 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7417979864553846e-05, |
| "loss": 4.4917, |
| "step": 157696 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7409593917043326e-05, |
| "loss": 4.514, |
| "step": 158208 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7401207969532806e-05, |
| "loss": 4.5031, |
| "step": 158720 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7392838400826015e-05, |
| "loss": 4.4793, |
| "step": 159232 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7384452453315495e-05, |
| "loss": 4.486, |
| "step": 159744 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7376066505804975e-05, |
| "loss": 4.4934, |
| "step": 160256 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.736768055829446e-05, |
| "loss": 4.4757, |
| "step": 160768 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735929461078394e-05, |
| "loss": 4.492, |
| "step": 161280 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.735090866327342e-05, |
| "loss": 4.4876, |
| "step": 161792 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.73425227157629e-05, |
| "loss": 4.4811, |
| "step": 162304 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.733413676825238e-05, |
| "loss": 4.5019, |
| "step": 162816 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.732575082074186e-05, |
| "loss": 4.4818, |
| "step": 163328 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7317364873231334e-05, |
| "loss": 4.4886, |
| "step": 163840 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7308978925720814e-05, |
| "loss": 4.4747, |
| "step": 164352 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.730060935701403e-05, |
| "loss": 4.4991, |
| "step": 164864 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.729222340950351e-05, |
| "loss": 4.473, |
| "step": 165376 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.728387021960045e-05, |
| "loss": 4.4761, |
| "step": 165888 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.727548427208993e-05, |
| "loss": 4.4673, |
| "step": 166400 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7267098324579415e-05, |
| "loss": 4.4765, |
| "step": 166912 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.7258712377068895e-05, |
| "loss": 4.468, |
| "step": 167424 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7250326429558375e-05, |
| "loss": 4.4679, |
| "step": 167936 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7241940482047855e-05, |
| "loss": 4.4755, |
| "step": 168448 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7233554534537335e-05, |
| "loss": 4.4744, |
| "step": 168960 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.722516858702681e-05, |
| "loss": 4.4669, |
| "step": 169472 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.721678263951629e-05, |
| "loss": 4.4818, |
| "step": 169984 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7208413070809504e-05, |
| "loss": 4.469, |
| "step": 170496 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7200027123298984e-05, |
| "loss": 4.4725, |
| "step": 171008 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.719164117578846e-05, |
| "loss": 4.458, |
| "step": 171520 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.718325522827794e-05, |
| "loss": 4.463, |
| "step": 172032 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.717486928076742e-05, |
| "loss": 4.4493, |
| "step": 172544 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.71664833332569e-05, |
| "loss": 4.467, |
| "step": 173056 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7158097385746384e-05, |
| "loss": 4.4527, |
| "step": 173568 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714974419584333e-05, |
| "loss": 4.4616, |
| "step": 174080 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.714135824833281e-05, |
| "loss": 4.4477, |
| "step": 174592 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.713297230082228e-05, |
| "loss": 4.4632, |
| "step": 175104 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.712458635331176e-05, |
| "loss": 4.4605, |
| "step": 175616 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.711620040580124e-05, |
| "loss": 4.4581, |
| "step": 176128 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.710783083709446e-05, |
| "loss": 4.4618, |
| "step": 176640 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709944488958393e-05, |
| "loss": 4.4426, |
| "step": 177152 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.709105894207341e-05, |
| "loss": 4.4421, |
| "step": 177664 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.708267299456289e-05, |
| "loss": 4.4591, |
| "step": 178176 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.707428704705237e-05, |
| "loss": 4.4474, |
| "step": 178688 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.706590109954185e-05, |
| "loss": 4.4509, |
| "step": 179200 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.705751515203134e-05, |
| "loss": 4.4353, |
| "step": 179712 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.704912920452082e-05, |
| "loss": 4.4511, |
| "step": 180224 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7040759635814026e-05, |
| "loss": 4.4265, |
| "step": 180736 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7032373688303506e-05, |
| "loss": 4.4588, |
| "step": 181248 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7024004119596715e-05, |
| "loss": 4.422, |
| "step": 181760 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7015618172086195e-05, |
| "loss": 4.4431, |
| "step": 182272 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.7007232224575675e-05, |
| "loss": 4.4431, |
| "step": 182784 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6998846277065155e-05, |
| "loss": 4.4316, |
| "step": 183296 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6990460329554635e-05, |
| "loss": 4.4475, |
| "step": 183808 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6982074382044115e-05, |
| "loss": 4.431, |
| "step": 184320 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6973688434533595e-05, |
| "loss": 4.425, |
| "step": 184832 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6965302487023075e-05, |
| "loss": 4.4225, |
| "step": 185344 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6956916539512555e-05, |
| "loss": 4.4457, |
| "step": 185856 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6948530592002035e-05, |
| "loss": 4.4218, |
| "step": 186368 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.694016102329525e-05, |
| "loss": 4.432, |
| "step": 186880 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.693177507578473e-05, |
| "loss": 4.4185, |
| "step": 187392 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.692338912827421e-05, |
| "loss": 4.4201, |
| "step": 187904 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.691500318076369e-05, |
| "loss": 4.4338, |
| "step": 188416 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.690661723325317e-05, |
| "loss": 4.4372, |
| "step": 188928 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6898231285742644e-05, |
| "loss": 4.4208, |
| "step": 189440 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6889845338232124e-05, |
| "loss": 4.4392, |
| "step": 189952 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.688147576952534e-05, |
| "loss": 4.4369, |
| "step": 190464 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.687308982201481e-05, |
| "loss": 4.4237, |
| "step": 190976 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.686470387450429e-05, |
| "loss": 4.4244, |
| "step": 191488 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.685633430579751e-05, |
| "loss": 4.4238, |
| "step": 192000 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.684794835828699e-05, |
| "loss": 4.4178, |
| "step": 192512 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683956241077647e-05, |
| "loss": 4.4151, |
| "step": 193024 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.683117646326595e-05, |
| "loss": 4.423, |
| "step": 193536 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.682279051575543e-05, |
| "loss": 4.427, |
| "step": 194048 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.681440456824491e-05, |
| "loss": 4.4336, |
| "step": 194560 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.680601862073439e-05, |
| "loss": 4.4194, |
| "step": 195072 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.679763267322387e-05, |
| "loss": 4.4, |
| "step": 195584 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678924672571335e-05, |
| "loss": 4.4041, |
| "step": 196096 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.678087715700656e-05, |
| "loss": 4.4178, |
| "step": 196608 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.677249120949604e-05, |
| "loss": 4.4145, |
| "step": 197120 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.676410526198552e-05, |
| "loss": 4.41, |
| "step": 197632 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.6755719314475e-05, |
| "loss": 4.4154, |
| "step": 198144 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.674733336696448e-05, |
| "loss": 4.4071, |
| "step": 198656 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673896379825769e-05, |
| "loss": 4.4024, |
| "step": 199168 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.673057785074717e-05, |
| "loss": 4.4119, |
| "step": 199680 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.672219190323665e-05, |
| "loss": 4.4169, |
| "step": 200192 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.671380595572613e-05, |
| "loss": 4.4138, |
| "step": 200704 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.670542000821561e-05, |
| "loss": 4.4129, |
| "step": 201216 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.669705043950882e-05, |
| "loss": 4.4019, |
| "step": 201728 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66886644919983e-05, |
| "loss": 4.4036, |
| "step": 202240 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.668027854448778e-05, |
| "loss": 4.4022, |
| "step": 202752 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.667189259697726e-05, |
| "loss": 4.4058, |
| "step": 203264 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.666350664946674e-05, |
| "loss": 4.4158, |
| "step": 203776 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.665512070195622e-05, |
| "loss": 4.3973, |
| "step": 204288 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.66467347544457e-05, |
| "loss": 4.4005, |
| "step": 204800 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.663836518573891e-05, |
| "loss": 4.4029, |
| "step": 205312 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662999561703213e-05, |
| "loss": 4.4018, |
| "step": 205824 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.662160966952161e-05, |
| "loss": 4.3886, |
| "step": 206336 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.661322372201109e-05, |
| "loss": 4.3924, |
| "step": 206848 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6604837774500567e-05, |
| "loss": 4.3916, |
| "step": 207360 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6596451826990047e-05, |
| "loss": 4.3882, |
| "step": 207872 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6588065879479526e-05, |
| "loss": 4.3893, |
| "step": 208384 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6579679931969006e-05, |
| "loss": 4.3896, |
| "step": 208896 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.657129398445848e-05, |
| "loss": 4.3916, |
| "step": 209408 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.656290803694796e-05, |
| "loss": 4.3911, |
| "step": 209920 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6554538468241175e-05, |
| "loss": 4.3813, |
| "step": 210432 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.654615252073065e-05, |
| "loss": 4.3901, |
| "step": 210944 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.653776657322013e-05, |
| "loss": 4.4011, |
| "step": 211456 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6529380625709615e-05, |
| "loss": 4.3978, |
| "step": 211968 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6520994678199095e-05, |
| "loss": 4.3929, |
| "step": 212480 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6512608730688575e-05, |
| "loss": 4.3884, |
| "step": 212992 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6504222783178055e-05, |
| "loss": 4.3882, |
| "step": 213504 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6495853214471264e-05, |
| "loss": 4.3911, |
| "step": 214016 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6487467266960744e-05, |
| "loss": 4.3778, |
| "step": 214528 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6479081319450224e-05, |
| "loss": 4.3785, |
| "step": 215040 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6470695371939704e-05, |
| "loss": 4.3805, |
| "step": 215552 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6462309424429184e-05, |
| "loss": 4.3874, |
| "step": 216064 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.645393985572239e-05, |
| "loss": 4.3802, |
| "step": 216576 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.644555390821187e-05, |
| "loss": 4.3839, |
| "step": 217088 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.643716796070135e-05, |
| "loss": 4.3785, |
| "step": 217600 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642878201319083e-05, |
| "loss": 4.375, |
| "step": 218112 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.642041244448405e-05, |
| "loss": 4.3678, |
| "step": 218624 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.641202649697353e-05, |
| "loss": 4.3918, |
| "step": 219136 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.640364054946301e-05, |
| "loss": 4.3887, |
| "step": 219648 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.639525460195249e-05, |
| "loss": 4.382, |
| "step": 220160 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.638686865444197e-05, |
| "loss": 4.3756, |
| "step": 220672 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637848270693145e-05, |
| "loss": 4.3766, |
| "step": 221184 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.637009675942093e-05, |
| "loss": 4.3806, |
| "step": 221696 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.636171081191041e-05, |
| "loss": 4.3948, |
| "step": 222208 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.635332486439989e-05, |
| "loss": 4.378, |
| "step": 222720 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.634493891688936e-05, |
| "loss": 4.3725, |
| "step": 223232 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.633655296937884e-05, |
| "loss": 4.3722, |
| "step": 223744 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.632818340067206e-05, |
| "loss": 4.3692, |
| "step": 224256 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631979745316154e-05, |
| "loss": 4.3677, |
| "step": 224768 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.631141150565102e-05, |
| "loss": 4.3699, |
| "step": 225280 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.63030255581405e-05, |
| "loss": 4.375, |
| "step": 225792 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.629465598943371e-05, |
| "loss": 4.3674, |
| "step": 226304 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.628627004192319e-05, |
| "loss": 4.3729, |
| "step": 226816 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.6277884094412666e-05, |
| "loss": 4.3761, |
| "step": 227328 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626953090450961e-05, |
| "loss": 4.3663, |
| "step": 227840 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.626114495699909e-05, |
| "loss": 4.3722, |
| "step": 228352 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.625275900948857e-05, |
| "loss": 4.3573, |
| "step": 228864 |
| }, |
| { |
| "epoch": 0.03, |
| "eval_loss": 4.33919095993042, |
| "eval_runtime": 294.9766, |
| "eval_samples_per_second": 1293.631, |
| "eval_steps_per_second": 40.427, |
| "step": 228960 |
| } |
| ], |
| "logging_steps": 512, |
| "max_steps": 3052726, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 10, |
| "total_flos": 9.459309247762675e+16, |
| "trial_name": null, |
| "trial_params": null |
| } |