| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.970873786407767, |
| "eval_steps": 100, |
| "global_step": 600, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 5e-05, |
| "loss": 1.9981, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.9999675930251536e-05, |
| "loss": 2.0613, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.99987037294078e-05, |
| "loss": 1.8228, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.99970834226737e-05, |
| "loss": 1.707, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.999481505205661e-05, |
| "loss": 1.5271, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.999189867636535e-05, |
| "loss": 1.4562, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.998833437120866e-05, |
| "loss": 1.3805, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.998412222899321e-05, |
| "loss": 1.2998, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 4.997926235892124e-05, |
| "loss": 1.4383, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.997375488698769e-05, |
| "loss": 1.2441, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.996759995597697e-05, |
| "loss": 1.2275, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.996079772545923e-05, |
| "loss": 1.2233, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.995334837178625e-05, |
| "loss": 1.1886, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.9945252088086825e-05, |
| "loss": 1.1779, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 4.993650908426182e-05, |
| "loss": 1.1122, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 4.992711958697868e-05, |
| "loss": 1.0766, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 4.991708383966556e-05, |
| "loss": 1.0836, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 4.9906402102505026e-05, |
| "loss": 0.9728, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 4.989507465242732e-05, |
| "loss": 0.9883, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 4.988310178310315e-05, |
| "loss": 1.0307, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 4.9870483804936084e-05, |
| "loss": 0.9613, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.9857221045054535e-05, |
| "loss": 1.1038, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.9843313847303246e-05, |
| "loss": 0.9206, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.9828762572234374e-05, |
| "loss": 0.9893, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.9813567597098166e-05, |
| "loss": 0.8328, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.979772931583317e-05, |
| "loss": 0.8712, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.978124813905599e-05, |
| "loss": 0.8031, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.976412449405072e-05, |
| "loss": 0.7675, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.974635882475778e-05, |
| "loss": 0.7851, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.972795159176243e-05, |
| "loss": 0.7447, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.9708903272282884e-05, |
| "loss": 0.7719, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.9689214360157844e-05, |
| "loss": 0.7142, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 4.9668885365833795e-05, |
| "loss": 0.8649, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.96479168163517e-05, |
| "loss": 0.7574, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9626309255333346e-05, |
| "loss": 0.9205, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9604063242967315e-05, |
| "loss": 0.7987, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.958117935599434e-05, |
| "loss": 0.7626, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.955765818769249e-05, |
| "loss": 0.8332, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.9533500347861675e-05, |
| "loss": 0.9178, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 4.950870646280791e-05, |
| "loss": 0.9919, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.948327717532705e-05, |
| "loss": 0.7098, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9457213144688095e-05, |
| "loss": 0.7552, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.9430515046616175e-05, |
| "loss": 0.7496, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.940318357327495e-05, |
| "loss": 0.7667, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.937521943324873e-05, |
| "loss": 0.8685, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 4.934662335152405e-05, |
| "loss": 0.6715, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.931739606947091e-05, |
| "loss": 0.8149, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.9287538344823544e-05, |
| "loss": 0.7346, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.925705095166079e-05, |
| "loss": 0.7803, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.922593468038599e-05, |
| "loss": 0.7451, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.919419033770652e-05, |
| "loss": 0.8402, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.916181874661286e-05, |
| "loss": 0.7988, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.91288207463573e-05, |
| "loss": 0.7444, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.9095197192432105e-05, |
| "loss": 0.8545, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.906094895654744e-05, |
| "loss": 0.761, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.902607692660865e-05, |
| "loss": 0.6741, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.8990582006693365e-05, |
| "loss": 0.8457, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.895446511702793e-05, |
| "loss": 0.8096, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.891772719396369e-05, |
| "loss": 0.7989, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.888036918995258e-05, |
| "loss": 0.7683, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.884239207352252e-05, |
| "loss": 0.7912, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.880379682925228e-05, |
| "loss": 0.7417, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.876458445774594e-05, |
| "loss": 0.7511, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 4.872475597560699e-05, |
| "loss": 0.8021, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.8684312415411897e-05, |
| "loss": 0.8154, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.864325482568344e-05, |
| "loss": 0.7109, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.860158427086341e-05, |
| "loss": 0.7915, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.855930183128513e-05, |
| "loss": 0.6363, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.851640860314536e-05, |
| "loss": 0.6987, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.8472905698475906e-05, |
| "loss": 0.6498, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 4.84287942451148e-05, |
| "loss": 0.7768, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.8384075386677054e-05, |
| "loss": 0.7979, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.833875028252499e-05, |
| "loss": 0.7611, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.8292820107738235e-05, |
| "loss": 0.7889, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.824628605308319e-05, |
| "loss": 0.6706, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.819914932498222e-05, |
| "loss": 0.7762, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 4.815141114548232e-05, |
| "loss": 0.7517, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.8103072752223486e-05, |
| "loss": 0.7793, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.805413539840659e-05, |
| "loss": 0.7306, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.80046003527609e-05, |
| "loss": 0.834, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.7954468899511215e-05, |
| "loss": 0.8076, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.790374233834452e-05, |
| "loss": 0.8375, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 4.7852421984376324e-05, |
| "loss": 0.7839, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.780050916811658e-05, |
| "loss": 0.6221, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.7748005235435137e-05, |
| "loss": 0.7212, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.76949115475269e-05, |
| "loss": 0.6369, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.7641229480876515e-05, |
| "loss": 0.7167, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.758696042722269e-05, |
| "loss": 0.6908, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 4.753210579352211e-05, |
| "loss": 0.6681, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.747666700191297e-05, |
| "loss": 0.6566, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.7420645489678076e-05, |
| "loss": 0.6869, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.7364042709207626e-05, |
| "loss": 0.7106, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.730686012796153e-05, |
| "loss": 0.6782, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.724909922843136e-05, |
| "loss": 0.7148, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 4.719076150810193e-05, |
| "loss": 0.8887, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.7131848479412476e-05, |
| "loss": 0.7408, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.707236166971742e-05, |
| "loss": 0.7046, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.7012302621246804e-05, |
| "loss": 0.7657, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.695167289106629e-05, |
| "loss": 0.8138, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.689047405103678e-05, |
| "loss": 0.6964, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.16, |
| "eval_loss": 0.796372652053833, |
| "eval_runtime": 5.3558, |
| "eval_samples_per_second": 1.867, |
| "eval_steps_per_second": 0.373, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 4.68287076877737e-05, |
| "loss": 0.6541, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.6766375402605824e-05, |
| "loss": 0.714, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.6703478811533794e-05, |
| "loss": 0.6823, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.6640019545188216e-05, |
| "loss": 0.8283, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.657599924878736e-05, |
| "loss": 0.7126, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.651141958209453e-05, |
| "loss": 0.6612, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.644628221937504e-05, |
| "loss": 0.7339, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 4.638058884935279e-05, |
| "loss": 0.6852, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.6314341175166485e-05, |
| "loss": 0.6804, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.6247540914325504e-05, |
| "loss": 0.7904, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.618018979866534e-05, |
| "loss": 0.6545, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.611228957430272e-05, |
| "loss": 0.7545, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.6043842001590344e-05, |
| "loss": 0.7478, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 4.5974848855071206e-05, |
| "loss": 0.8083, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.590531192343266e-05, |
| "loss": 0.7435, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.5835233009459964e-05, |
| "loss": 0.7647, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.576461392998961e-05, |
| "loss": 0.7631, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.569345651586218e-05, |
| "loss": 0.7674, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.5621762611874904e-05, |
| "loss": 0.6737, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 4.55495340767338e-05, |
| "loss": 0.6901, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.547677278300555e-05, |
| "loss": 0.709, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.540348061706886e-05, |
| "loss": 0.5995, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.5329659479065655e-05, |
| "loss": 0.6832, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.525531128285173e-05, |
| "loss": 0.7584, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.5180437955947195e-05, |
| "loss": 0.6998, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 4.510504143948646e-05, |
| "loss": 0.6477, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.502912368816794e-05, |
| "loss": 0.681, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.4952686670203357e-05, |
| "loss": 0.7638, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.487573236726673e-05, |
| "loss": 0.7086, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.4798262774442986e-05, |
| "loss": 0.6166, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.472027990017623e-05, |
| "loss": 0.5901, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 4.464178576621771e-05, |
| "loss": 0.8138, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.456278240757338e-05, |
| "loss": 0.6773, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.4483271872451094e-05, |
| "loss": 0.6576, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.44032562222076e-05, |
| "loss": 0.7106, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.432273753129502e-05, |
| "loss": 0.6479, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.4241717887207124e-05, |
| "loss": 0.7102, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.416019939042515e-05, |
| "loss": 0.7188, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 4.40781841543634e-05, |
| "loss": 0.6996, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.399567430531444e-05, |
| "loss": 0.7523, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.391267198239394e-05, |
| "loss": 0.6279, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.3829179337485254e-05, |
| "loss": 0.6718, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.374519853518362e-05, |
| "loss": 0.668, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.366073175274004e-05, |
| "loss": 0.8341, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 4.357578118000482e-05, |
| "loss": 0.7262, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.3490349019370824e-05, |
| "loss": 0.7613, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.340443748571636e-05, |
| "loss": 0.6722, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.331804880634775e-05, |
| "loss": 0.7229, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.3231185220941605e-05, |
| "loss": 0.7265, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.3143848981486746e-05, |
| "loss": 0.7289, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 4.305604235222582e-05, |
| "loss": 0.6441, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.2967767609596624e-05, |
| "loss": 0.772, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.287902704217304e-05, |
| "loss": 0.705, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.2789822950605725e-05, |
| "loss": 0.7711, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.2700157647562486e-05, |
| "loss": 0.7688, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.261003345766832e-05, |
| "loss": 0.6978, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 4.251945271744509e-05, |
| "loss": 0.7418, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.242841777525101e-05, |
| "loss": 0.6258, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.233693099121976e-05, |
| "loss": 0.7988, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.224499473719926e-05, |
| "loss": 0.6947, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.21526113966902e-05, |
| "loss": 0.6712, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.205978336478427e-05, |
| "loss": 0.6871, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 4.196651304810202e-05, |
| "loss": 0.6682, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.187280286473048e-05, |
| "loss": 0.6139, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.177865524416052e-05, |
| "loss": 0.6259, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.168407262722377e-05, |
| "loss": 0.6894, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.1589057466029444e-05, |
| "loss": 0.8107, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.149361222390068e-05, |
| "loss": 0.728, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 4.1397739375310736e-05, |
| "loss": 0.6987, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.1301441405818794e-05, |
| "loss": 0.7855, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.120472081200556e-05, |
| "loss": 0.6786, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.1107580101408524e-05, |
| "loss": 0.805, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.101002179245693e-05, |
| "loss": 0.7006, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.09120484144065e-05, |
| "loss": 0.7023, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.0813662507273885e-05, |
| "loss": 0.7802, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 4.0714866621770775e-05, |
| "loss": 0.6794, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.06156633192378e-05, |
| "loss": 0.6833, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.051605517157809e-05, |
| "loss": 0.7591, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.041604476119064e-05, |
| "loss": 0.7059, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.0315634680903336e-05, |
| "loss": 0.713, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.021482753390573e-05, |
| "loss": 0.686, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 4.011362593368156e-05, |
| "loss": 0.718, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 4.001203250394101e-05, |
| "loss": 0.8431, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.9910049878552646e-05, |
| "loss": 0.7043, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.9807680701475174e-05, |
| "loss": 0.7591, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.970492762668887e-05, |
| "loss": 0.7677, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.9601793318126776e-05, |
| "loss": 0.6897, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.9498280449605664e-05, |
| "loss": 0.753, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.939439170475666e-05, |
| "loss": 0.7049, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.929012977695572e-05, |
| "loss": 0.6654, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.918549736925378e-05, |
| "loss": 0.6969, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.9080497194306686e-05, |
| "loss": 0.6655, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.897513197430486e-05, |
| "loss": 0.7118, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.8869404440902735e-05, |
| "loss": 0.5986, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.876331733514792e-05, |
| "loss": 0.7191, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.865687340741014e-05, |
| "loss": 0.6433, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.855007541730996e-05, |
| "loss": 0.6386, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.844292613364719e-05, |
| "loss": 0.8535, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.833542833432916e-05, |
| "loss": 0.7112, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.822758480629864e-05, |
| "loss": 0.7226, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.32, |
| "eval_loss": 0.7749701738357544, |
| "eval_runtime": 5.4698, |
| "eval_samples_per_second": 1.828, |
| "eval_steps_per_second": 0.366, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.811939834546163e-05, |
| "loss": 0.6725, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.801087175661488e-05, |
| "loss": 0.711, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.790200785337311e-05, |
| "loss": 0.6973, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.7792809458096146e-05, |
| "loss": 0.7107, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.768327940181572e-05, |
| "loss": 0.6809, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.757342052416205e-05, |
| "loss": 0.6272, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.746323567329024e-05, |
| "loss": 0.7267, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.7352727705806446e-05, |
| "loss": 0.764, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.724189948669381e-05, |
| "loss": 0.7307, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.7130753889238165e-05, |
| "loss": 0.8486, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.7019293794953595e-05, |
| "loss": 0.7304, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.690752209350767e-05, |
| "loss": 0.8079, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.679544168264656e-05, |
| "loss": 0.6661, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.668305546811991e-05, |
| "loss": 0.7078, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.65703663636055e-05, |
| "loss": 0.6363, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.645737729063372e-05, |
| "loss": 0.6479, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.63440911785118e-05, |
| "loss": 0.8032, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.6230510964247886e-05, |
| "loss": 0.7619, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.61166395924749e-05, |
| "loss": 0.6744, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.600248001537418e-05, |
| "loss": 0.5971, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.588803519259898e-05, |
| "loss": 0.7197, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.577330809119768e-05, |
| "loss": 0.7581, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.565830168553691e-05, |
| "loss": 0.6288, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.554301895722442e-05, |
| "loss": 0.8863, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 3.542746289503181e-05, |
| "loss": 0.6523, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.5311636494816984e-05, |
| "loss": 0.6503, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.519554275944655e-05, |
| "loss": 0.7903, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.50791846987179e-05, |
| "loss": 0.7974, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.496256532928125e-05, |
| "loss": 0.712, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.484568767456135e-05, |
| "loss": 0.6632, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 3.47285547646792e-05, |
| "loss": 0.7328, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.46111696363734e-05, |
| "loss": 0.6455, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.4493535332921475e-05, |
| "loss": 0.6295, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.4375654904060956e-05, |
| "loss": 0.6613, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.425753140591031e-05, |
| "loss": 0.6991, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.413916790088973e-05, |
| "loss": 0.6047, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 3.402056745764172e-05, |
| "loss": 0.7401, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.3901733150951535e-05, |
| "loss": 0.6909, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.37826680616675e-05, |
| "loss": 0.6293, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.366337527662109e-05, |
| "loss": 0.7261, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.354385788854694e-05, |
| "loss": 0.8233, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.3424118996002624e-05, |
| "loss": 0.7349, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.3304161703288385e-05, |
| "loss": 0.6952, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 3.318398912036658e-05, |
| "loss": 0.6676, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.3063604362781125e-05, |
| "loss": 0.6929, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.2943010551576654e-05, |
| "loss": 0.7219, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.282221081321766e-05, |
| "loss": 0.7747, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.27012082795074e-05, |
| "loss": 0.6931, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.258000608750674e-05, |
| "loss": 0.6869, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 3.245860737945278e-05, |
| "loss": 0.6864, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.233701530267743e-05, |
| "loss": 0.7052, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.2215233009525786e-05, |
| "loss": 0.7917, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.209326365727441e-05, |
| "loss": 0.7137, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.19711104080495e-05, |
| "loss": 0.7149, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.184877642874486e-05, |
| "loss": 0.694, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 3.172626489093986e-05, |
| "loss": 0.6155, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.1603578970817146e-05, |
| "loss": 0.743, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.1480721849080344e-05, |
| "loss": 0.6893, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.1357696710871576e-05, |
| "loss": 0.6215, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.123450674568889e-05, |
| "loss": 0.6225, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.1111155147303574e-05, |
| "loss": 0.795, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 3.0987645113677335e-05, |
| "loss": 0.84, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.086397984687943e-05, |
| "loss": 0.6852, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.074016255300359e-05, |
| "loss": 0.7301, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.061619644208498e-05, |
| "loss": 0.7638, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.0492084728016902e-05, |
| "loss": 0.7403, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.036783062846751e-05, |
| "loss": 0.8257, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 3.0243437364796386e-05, |
| "loss": 0.6582, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 3.0118908161971025e-05, |
| "loss": 0.6031, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.9994246248483228e-05, |
| "loss": 0.6981, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.9869454856265376e-05, |
| "loss": 0.6632, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.9744537220606693e-05, |
| "loss": 0.7312, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.9619496580069317e-05, |
| "loss": 0.6421, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.9494336176404357e-05, |
| "loss": 0.6031, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.9369059254467858e-05, |
| "loss": 0.77, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.9243669062136664e-05, |
| "loss": 0.7053, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.9118168850224226e-05, |
| "loss": 0.7678, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.8992561872396313e-05, |
| "loss": 0.5997, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.8866851385086652e-05, |
| "loss": 0.772, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.8741040647412538e-05, |
| "loss": 0.6521, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.8615132921090292e-05, |
| "loss": 0.6677, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.8489131470350727e-05, |
| "loss": 0.6277, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.836303956185453e-05, |
| "loss": 0.7005, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.8236860464607534e-05, |
| "loss": 0.8361, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.8110597449876014e-05, |
| "loss": 0.6893, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.7984253791101834e-05, |
| "loss": 0.7185, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.7857832763817622e-05, |
| "loss": 0.703, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.773133764556181e-05, |
| "loss": 0.6402, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.7604771715793697e-05, |
| "loss": 0.6384, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.74781382558084e-05, |
| "loss": 0.8262, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.735144054865181e-05, |
| "loss": 0.7639, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.722468187903544e-05, |
| "loss": 0.7043, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.7097865533251314e-05, |
| "loss": 0.5832, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.6970994799086735e-05, |
| "loss": 0.681, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.6844072965739054e-05, |
| "loss": 0.658, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.6717103323730396e-05, |
| "loss": 0.6749, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.6590089164822364e-05, |
| "loss": 0.6759, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.6463033781930673e-05, |
| "loss": 0.5983, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.633594046903981e-05, |
| "loss": 0.7378, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.62088125211176e-05, |
| "loss": 0.7041, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.49, |
| "eval_loss": 0.7667814493179321, |
| "eval_runtime": 5.4254, |
| "eval_samples_per_second": 1.843, |
| "eval_steps_per_second": 0.369, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.608165323402982e-05, |
| "loss": 0.6778, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.595446590445471e-05, |
| "loss": 0.6936, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.5827253829797566e-05, |
| "loss": 0.6452, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.570002030810517e-05, |
| "loss": 0.709, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.5572768637980367e-05, |
| "loss": 0.6602, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.5445502118496485e-05, |
| "loss": 0.6696, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.5318224049111844e-05, |
| "loss": 0.7019, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.5190937729584203e-05, |
| "loss": 0.6444, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.5063646459885193e-05, |
| "loss": 0.634, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.493635354011481e-05, |
| "loss": 0.6873, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.4809062270415806e-05, |
| "loss": 0.6692, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.4681775950888155e-05, |
| "loss": 0.5994, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.4554497881503528e-05, |
| "loss": 0.6154, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.442723136201964e-05, |
| "loss": 0.7084, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.4299979691894835e-05, |
| "loss": 0.6467, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.417274617020244e-05, |
| "loss": 0.6348, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.404553409554529e-05, |
| "loss": 0.8328, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.3918346765970183e-05, |
| "loss": 0.6981, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.379118747888241e-05, |
| "loss": 0.6764, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.3664059530960198e-05, |
| "loss": 0.8371, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.3536966218069332e-05, |
| "loss": 0.5997, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.340991083517764e-05, |
| "loss": 0.6977, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.3282896676269607e-05, |
| "loss": 0.6473, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.3155927034260945e-05, |
| "loss": 0.6223, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.3029005200913274e-05, |
| "loss": 0.7039, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.2902134466748688e-05, |
| "loss": 0.6841, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.2775318120964563e-05, |
| "loss": 0.6255, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.2648559451348197e-05, |
| "loss": 0.7188, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.2521861744191603e-05, |
| "loss": 0.636, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.23952282842063e-05, |
| "loss": 0.6307, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.22686623544382e-05, |
| "loss": 0.7769, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.2142167236182384e-05, |
| "loss": 0.6349, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.2015746208898168e-05, |
| "loss": 0.8899, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.1889402550123992e-05, |
| "loss": 0.6122, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.176313953539247e-05, |
| "loss": 0.816, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.163696043814547e-05, |
| "loss": 0.641, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.1510868529649282e-05, |
| "loss": 0.7376, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.1384867078909714e-05, |
| "loss": 0.5882, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.1258959352587468e-05, |
| "loss": 0.7829, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.113314861491335e-05, |
| "loss": 0.6804, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.1007438127603696e-05, |
| "loss": 0.6067, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.0881831149775773e-05, |
| "loss": 0.6679, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.0756330937863348e-05, |
| "loss": 0.7271, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.0630940745532148e-05, |
| "loss": 0.6475, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.050566382359565e-05, |
| "loss": 0.6982, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.0380503419930686e-05, |
| "loss": 0.6722, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.025546277939331e-05, |
| "loss": 0.6554, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.013054514373462e-05, |
| "loss": 0.7071, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.0005753751516788e-05, |
| "loss": 0.7764, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.988109183802898e-05, |
| "loss": 0.7069, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.9756562635203623e-05, |
| "loss": 0.739, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.96321693715325e-05, |
| "loss": 0.7021, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.9507915271983104e-05, |
| "loss": 0.6992, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.9383803557915016e-05, |
| "loss": 0.7653, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.9259837446996416e-05, |
| "loss": 0.599, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.9136020153120578e-05, |
| "loss": 0.7733, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.9012354886322667e-05, |
| "loss": 0.7251, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.888884485269643e-05, |
| "loss": 0.7726, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.8765493254311115e-05, |
| "loss": 0.6247, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.8642303289128426e-05, |
| "loss": 0.6265, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.8519278150919666e-05, |
| "loss": 0.6649, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8396421029182863e-05, |
| "loss": 0.7513, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8273735109060147e-05, |
| "loss": 0.6993, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8151223571255143e-05, |
| "loss": 0.6555, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.8028889591950506e-05, |
| "loss": 0.7781, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.7906736342725588e-05, |
| "loss": 0.6095, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.7784766990474226e-05, |
| "loss": 0.6259, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.7662984697322577e-05, |
| "loss": 0.6827, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.7541392620547222e-05, |
| "loss": 0.7612, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.7419993912493266e-05, |
| "loss": 0.7042, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.72987917204926e-05, |
| "loss": 0.6964, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.7177789186782342e-05, |
| "loss": 0.5989, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.7056989448423348e-05, |
| "loss": 0.6249, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6936395637218884e-05, |
| "loss": 0.8187, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6816010879633423e-05, |
| "loss": 0.6366, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6695838296711624e-05, |
| "loss": 0.7542, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6575881003997382e-05, |
| "loss": 0.7023, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6456142111453064e-05, |
| "loss": 0.617, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.633662472337891e-05, |
| "loss": 0.7145, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.6217331938332507e-05, |
| "loss": 0.548, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.6098266849048467e-05, |
| "loss": 0.6186, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.597943254235829e-05, |
| "loss": 0.6512, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.5860832099110275e-05, |
| "loss": 0.6484, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.574246859408969e-05, |
| "loss": 0.6629, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.5624345095939043e-05, |
| "loss": 0.7173, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.5506464667078527e-05, |
| "loss": 0.5953, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.53888303636266e-05, |
| "loss": 0.6199, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.5271445235320803e-05, |
| "loss": 0.6776, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.5154312325438652e-05, |
| "loss": 0.664, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.5037434670718759e-05, |
| "loss": 0.7271, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4920815301282098e-05, |
| "loss": 0.7179, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.4804457240553457e-05, |
| "loss": 0.6933, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4688363505183017e-05, |
| "loss": 0.6486, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4572537104968193e-05, |
| "loss": 0.7077, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4456981042775581e-05, |
| "loss": 0.7225, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4341698314463097e-05, |
| "loss": 0.6533, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4226691908802326e-05, |
| "loss": 0.7462, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.4111964807401029e-05, |
| "loss": 0.6466, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.3997519984625823e-05, |
| "loss": 0.6793, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.3883360407525099e-05, |
| "loss": 0.7347, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.65, |
| "eval_loss": 0.7635688781738281, |
| "eval_runtime": 5.3622, |
| "eval_samples_per_second": 1.865, |
| "eval_steps_per_second": 0.373, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.3769489035752115e-05, |
| "loss": 0.6504, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.3655908821488203e-05, |
| "loss": 0.7998, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.3542622709366281e-05, |
| "loss": 0.7758, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.342963363639451e-05, |
| "loss": 0.5984, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.3316944531880102e-05, |
| "loss": 0.7997, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.3204558317353444e-05, |
| "loss": 0.7674, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.3092477906492328e-05, |
| "loss": 0.6735, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.2980706205046401e-05, |
| "loss": 0.6896, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.286924611076183e-05, |
| "loss": 0.6908, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.27581005133062e-05, |
| "loss": 0.7284, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.2647272294193564e-05, |
| "loss": 0.6735, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.253676432670976e-05, |
| "loss": 0.7318, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.2426579475837951e-05, |
| "loss": 0.6424, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.2316720598184281e-05, |
| "loss": 0.7064, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.2207190541903856e-05, |
| "loss": 0.6582, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.2097992146626907e-05, |
| "loss": 0.7575, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.198912824338514e-05, |
| "loss": 0.732, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.1880601654538364e-05, |
| "loss": 0.6559, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.1772415193701358e-05, |
| "loss": 0.7011, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.1664571665670838e-05, |
| "loss": 0.6509, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.1557073866352802e-05, |
| "loss": 0.6958, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.144992458269005e-05, |
| "loss": 0.7596, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.1343126592589867e-05, |
| "loss": 0.6635, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1236682664852082e-05, |
| "loss": 0.7029, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.1130595559097264e-05, |
| "loss": 0.7203, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.102486802569514e-05, |
| "loss": 0.6701, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.0919502805693316e-05, |
| "loss": 0.7236, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.0814502630746232e-05, |
| "loss": 0.6874, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 1.0709870223044285e-05, |
| "loss": 0.7425, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.0605608295243343e-05, |
| "loss": 0.7986, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.0501719550394337e-05, |
| "loss": 0.7898, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.0398206681873221e-05, |
| "loss": 0.6511, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.0295072373311133e-05, |
| "loss": 0.6351, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.0192319298524835e-05, |
| "loss": 0.7099, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 1.0089950121447355e-05, |
| "loss": 0.6651, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 9.987967496058992e-06, |
| "loss": 0.6911, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 9.88637406631844e-06, |
| "loss": 0.7179, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 9.785172466094275e-06, |
| "loss": 0.7343, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 9.684365319096669e-06, |
| "loss": 0.7156, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 9.583955238809373e-06, |
| "loss": 0.755, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 9.483944828421912e-06, |
| "loss": 0.7934, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.384336680762204e-06, |
| "loss": 0.5811, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.285133378229224e-06, |
| "loss": 0.781, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.186337492726116e-06, |
| "loss": 0.6609, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 9.087951585593502e-06, |
| "loss": 0.6326, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 8.989978207543088e-06, |
| "loss": 0.7223, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 8.892419898591479e-06, |
| "loss": 0.547, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 8.79527918799444e-06, |
| "loss": 0.7608, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 8.698558594181209e-06, |
| "loss": 0.7017, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 8.602260624689271e-06, |
| "loss": 0.738, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 8.506387776099323e-06, |
| "loss": 0.6746, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 8.41094253397057e-06, |
| "loss": 0.568, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 8.315927372776234e-06, |
| "loss": 0.5854, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 8.221344755839489e-06, |
| "loss": 0.6772, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.127197135269524e-06, |
| "loss": 0.6955, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 8.03348695189799e-06, |
| "loss": 0.741, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 7.940216635215733e-06, |
| "loss": 0.584, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 7.847388603309807e-06, |
| "loss": 0.8102, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 7.755005262800744e-06, |
| "loss": 0.7458, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 7.663069008780242e-06, |
| "loss": 0.6538, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.571582224748991e-06, |
| "loss": 0.6597, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.480547282554914e-06, |
| "loss": 0.6178, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.389966542331683e-06, |
| "loss": 0.6598, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.2998423524375165e-06, |
| "loss": 0.6472, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.210177049394279e-06, |
| "loss": 0.5808, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 7.1209729578269686e-06, |
| "loss": 0.7688, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 7.032232390403376e-06, |
| "loss": 0.6006, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 6.943957647774177e-06, |
| "loss": 0.6599, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 6.856151018513257e-06, |
| "loss": 0.6787, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 6.768814779058405e-06, |
| "loss": 0.6807, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 6.681951193652253e-06, |
| "loss": 0.649, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 6.595562514283643e-06, |
| "loss": 0.7593, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.509650980629175e-06, |
| "loss": 0.6135, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.4242188199951816e-06, |
| "loss": 0.6612, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.339268247259964e-06, |
| "loss": 0.7261, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.254801464816387e-06, |
| "loss": 0.7471, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.170820662514748e-06, |
| "loss": 0.5885, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 6.087328017606064e-06, |
| "loss": 0.718, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 6.004325694685561e-06, |
| "loss": 0.7714, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.9218158456366e-06, |
| "loss": 0.6503, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.839800609574853e-06, |
| "loss": 0.6934, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.758282112792887e-06, |
| "loss": 0.7706, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.67726246870498e-06, |
| "loss": 0.7014, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.596743777792404e-06, |
| "loss": 0.6578, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.516728127548909e-06, |
| "loss": 0.6324, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.437217592426627e-06, |
| "loss": 0.6874, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.358214233782286e-06, |
| "loss": 0.6597, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.279720099823776e-06, |
| "loss": 0.6759, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.2017372255570185e-06, |
| "loss": 0.7586, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.12426763273327e-06, |
| "loss": 0.6673, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 5.047313329796643e-06, |
| "loss": 0.6927, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.970876311832059e-06, |
| "loss": 0.6708, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.894958560513538e-06, |
| "loss": 0.6863, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.819562044052811e-06, |
| "loss": 0.6047, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.744688717148268e-06, |
| "loss": 0.7012, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.670340520934346e-06, |
| "loss": 0.7025, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.596519382931138e-06, |
| "loss": 0.7416, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.5232272169944554e-06, |
| "loss": 0.6883, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.450465923266198e-06, |
| "loss": 0.5197, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.378237388125103e-06, |
| "loss": 0.6245, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.81, |
| "eval_loss": 0.7600494623184204, |
| "eval_runtime": 5.4079, |
| "eval_samples_per_second": 1.849, |
| "eval_steps_per_second": 0.37, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.306543484137821e-06, |
| "loss": 0.7573, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.235386070010391e-06, |
| "loss": 0.6855, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 4.16476699054004e-06, |
| "loss": 0.6273, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.094688076567346e-06, |
| "loss": 0.6455, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 4.025151144928796e-06, |
| "loss": 0.7089, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.956157998409662e-06, |
| "loss": 0.6158, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.88771042569728e-06, |
| "loss": 0.6257, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.819810201334661e-06, |
| "loss": 0.6961, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.752459085674498e-06, |
| "loss": 0.7702, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.6856588248335154e-06, |
| "loss": 0.6489, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.6194111506472115e-06, |
| "loss": 0.6174, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.55371778062496e-06, |
| "loss": 0.7665, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.488580417905471e-06, |
| "loss": 0.7151, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.4240007512126443e-06, |
| "loss": 0.7234, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.359980454811784e-06, |
| "loss": 0.776, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.2965211884662034e-06, |
| "loss": 0.5971, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.2336245973941774e-06, |
| "loss": 0.716, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.1712923122263065e-06, |
| "loss": 0.6526, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.109525948963221e-06, |
| "loss": 0.6179, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 3.048327108933713e-06, |
| "loss": 0.7036, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.987697378753196e-06, |
| "loss": 0.6592, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.9276383302825813e-06, |
| "loss": 0.7153, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.868151520587528e-06, |
| "loss": 0.8054, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.8092384918980723e-06, |
| "loss": 0.7869, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.750900771568646e-06, |
| "loss": 0.6516, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.6931398720384747e-06, |
| "loss": 0.6807, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.635957290792379e-06, |
| "loss": 0.6403, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.5793545103219324e-06, |
| "loss": 0.6817, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.5233329980870387e-06, |
| "loss": 0.6285, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.4678942064778944e-06, |
| "loss": 0.6084, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.4130395727773175e-06, |
| "loss": 0.6297, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.3587705191234932e-06, |
| "loss": 0.7182, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.3050884524731088e-06, |
| "loss": 0.7799, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.2519947645648688e-06, |
| "loss": 0.6957, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.1994908318834246e-06, |
| "loss": 0.716, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.1475780156236746e-06, |
| "loss": 0.6591, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.0962576616554847e-06, |
| "loss": 0.6581, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.0455311004887874e-06, |
| "loss": 0.7222, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.995399647239099e-06, |
| "loss": 0.7222, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.945864601593414e-06, |
| "loss": 0.747, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.896927247776517e-06, |
| "loss": 0.8163, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.8485888545176828e-06, |
| "loss": 0.7345, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.8008506750177844e-06, |
| "loss": 0.6471, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.75371394691681e-06, |
| "loss": 0.622, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.7071798922617705e-06, |
| "loss": 0.7272, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6612497174750137e-06, |
| "loss": 0.7044, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.6159246133229555e-06, |
| "loss": 0.6701, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.571205754885205e-06, |
| "loss": 0.6877, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.5270943015240984e-06, |
| "loss": 0.6555, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4835913968546416e-06, |
| "loss": 0.7171, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.4406981687148684e-06, |
| "loss": 0.6398, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3984157291365902e-06, |
| "loss": 0.7236, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3567451743165676e-06, |
| "loss": 0.7186, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.3156875845881045e-06, |
| "loss": 0.6843, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2752440243930185e-06, |
| "loss": 0.6653, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.2354155422540592e-06, |
| "loss": 0.5802, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1962031707477257e-06, |
| "loss": 0.6668, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1576079264774832e-06, |
| "loss": 0.7334, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1196308100474256e-06, |
| "loss": 0.7972, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0822728060363136e-06, |
| "loss": 0.7289, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0455348829720657e-06, |
| "loss": 0.7172, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.0094179933066411e-06, |
| "loss": 0.7979, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.739230733913473e-07, |
| "loss": 0.6088, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.390510434525657e-07, |
| "loss": 0.7429, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.04802807567895e-07, |
| "loss": 0.8321, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.711792536427094e-07, |
| "loss": 0.788, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.381812533871436e-07, |
| "loss": 0.669, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 8.058096622934885e-07, |
| "loss": 0.6787, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.740653196140119e-07, |
| "loss": 0.6903, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.429490483392088e-07, |
| "loss": 0.6586, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.124616551764524e-07, |
| "loss": 0.6599, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.826039305290937e-07, |
| "loss": 0.833, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.533766484759562e-07, |
| "loss": 0.5695, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 6.247805667512762e-07, |
| "loss": 0.7149, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.968164267250497e-07, |
| "loss": 0.6131, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.694849533838282e-07, |
| "loss": 0.6447, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.427868553119081e-07, |
| "loss": 0.724, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 5.167228246729578e-07, |
| "loss": 0.6193, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.91293537192089e-07, |
| "loss": 0.7465, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.6649965213832467e-07, |
| "loss": 0.6532, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.423418123075118e-07, |
| "loss": 0.5808, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.1882064400566024e-07, |
| "loss": 0.5762, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.9593675703269163e-07, |
| "loss": 0.608, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.736907446666521e-07, |
| "loss": 0.6646, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.5208318364830517e-07, |
| "loss": 0.6895, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.311146341662075e-07, |
| "loss": 0.6614, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.107856398421566e-07, |
| "loss": 0.6311, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.910967277171245e-07, |
| "loss": 0.8198, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.7204840823757125e-07, |
| "loss": 0.688, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.536411752422252e-07, |
| "loss": 0.7146, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.3587550594927897e-07, |
| "loss": 0.7203, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.1875186094400813e-07, |
| "loss": 0.801, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.022706841668387e-07, |
| "loss": 0.7747, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.8643240290183716e-07, |
| "loss": 0.7338, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.7123742776563036e-07, |
| "loss": 0.702, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5668615269675834e-07, |
| "loss": 0.6807, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.4277895494546334e-07, |
| "loss": 0.6561, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.2951619506391676e-07, |
| "loss": 0.7484, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1689821689685465e-07, |
| "loss": 0.5526, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.0492534757267913e-07, |
| "loss": 0.7466, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.97, |
| "eval_loss": 0.7597864866256714, |
| "eval_runtime": 5.3858, |
| "eval_samples_per_second": 1.857, |
| "eval_steps_per_second": 0.371, |
| "step": 600 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 618, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 100, |
| "total_flos": 1.5123582321426432e+18, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |
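A minimal sketch, assuming the JSON above is saved to disk as `trainer_state.json` (the filename the Hugging Face Trainer writes inside each checkpoint directory); it separates the per-step training losses in `log_history` from the `eval_loss` records logged at steps 500 and 600. The filename and the printed summary are illustrative, not part of the original log.

```python
import json

# Assumed path: Trainer saves this file as trainer_state.json in checkpoint dirs.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_loss" instead.
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(f"{len(train)} training points, final loss {train[-1][1]:.4f}")
print(f"{len(evals)} eval points, final eval_loss {evals[-1][1]:.4f}")
```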