{
  "best_metric": 0.12343376874923706,
  "best_model_checkpoint": "multilingual-e5-small-aligned-readability-20241214-new/checkpoint-23439",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 23439,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06399590426212723,
      "grad_norm": 2.0385684967041016,
      "learning_rate": 4.8933401595631215e-05,
      "loss": 0.286,
      "step": 500
    },
    {
      "epoch": 0.12799180852425446,
      "grad_norm": 1.2255290746688843,
      "learning_rate": 4.786680319126243e-05,
      "loss": 0.1864,
      "step": 1000
    },
    {
      "epoch": 0.19198771278638166,
      "grad_norm": 1.411687970161438,
      "learning_rate": 4.680020478689364e-05,
      "loss": 0.1751,
      "step": 1500
    },
    {
      "epoch": 0.2559836170485089,
      "grad_norm": 0.9454971551895142,
      "learning_rate": 4.573360638252485e-05,
      "loss": 0.1695,
      "step": 2000
    },
    {
      "epoch": 0.3199795213106361,
      "grad_norm": 1.2582781314849854,
      "learning_rate": 4.4667007978156063e-05,
      "loss": 0.1666,
      "step": 2500
    },
    {
      "epoch": 0.3839754255727633,
      "grad_norm": 1.103311538696289,
      "learning_rate": 4.360040957378728e-05,
      "loss": 0.1645,
      "step": 3000
    },
    {
      "epoch": 0.4479713298348906,
      "grad_norm": 1.237697958946228,
      "learning_rate": 4.2533811169418495e-05,
      "loss": 0.154,
      "step": 3500
    },
    {
      "epoch": 0.5119672340970178,
      "grad_norm": 1.2233279943466187,
      "learning_rate": 4.146721276504971e-05,
      "loss": 0.1554,
      "step": 4000
    },
    {
      "epoch": 0.575963138359145,
      "grad_norm": 1.345629334449768,
      "learning_rate": 4.040061436068092e-05,
      "loss": 0.1585,
      "step": 4500
    },
    {
      "epoch": 0.6399590426212722,
      "grad_norm": 1.7818764448165894,
      "learning_rate": 3.933401595631213e-05,
      "loss": 0.1523,
      "step": 5000
    },
    {
      "epoch": 0.7039549468833994,
      "grad_norm": 2.2489941120147705,
      "learning_rate": 3.8267417551943344e-05,
      "loss": 0.1546,
      "step": 5500
    },
    {
      "epoch": 0.7679508511455266,
      "grad_norm": 0.6691417098045349,
      "learning_rate": 3.7200819147574556e-05,
      "loss": 0.1485,
      "step": 6000
    },
    {
      "epoch": 0.831946755407654,
      "grad_norm": 1.420074462890625,
      "learning_rate": 3.613422074320577e-05,
      "loss": 0.1479,
      "step": 6500
    },
    {
      "epoch": 0.8959426596697811,
      "grad_norm": 1.6003996133804321,
      "learning_rate": 3.506762233883698e-05,
      "loss": 0.1435,
      "step": 7000
    },
    {
      "epoch": 0.9599385639319084,
      "grad_norm": 1.0591192245483398,
      "learning_rate": 3.400102393446819e-05,
      "loss": 0.1484,
      "step": 7500
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.13237929344177246,
      "eval_mse": 0.1323792923084062,
      "eval_runtime": 106.0386,
      "eval_samples_per_second": 1717.403,
      "eval_steps_per_second": 214.677,
      "step": 7813
    },
    {
      "epoch": 1.0239344681940357,
      "grad_norm": 0.48294901847839355,
      "learning_rate": 3.293442553009941e-05,
      "loss": 0.1372,
      "step": 8000
    },
    {
      "epoch": 1.0879303724561629,
      "grad_norm": 1.516916036605835,
      "learning_rate": 3.1867827125730624e-05,
      "loss": 0.1237,
      "step": 8500
    },
    {
      "epoch": 1.15192627671829,
      "grad_norm": 1.1309595108032227,
      "learning_rate": 3.0801228721361836e-05,
      "loss": 0.1212,
      "step": 9000
    },
    {
      "epoch": 1.2159221809804173,
      "grad_norm": 1.1831127405166626,
      "learning_rate": 2.9734630316993045e-05,
      "loss": 0.1217,
      "step": 9500
    },
    {
      "epoch": 1.2799180852425445,
      "grad_norm": 1.3023440837860107,
      "learning_rate": 2.866803191262426e-05,
      "loss": 0.1233,
      "step": 10000
    },
    {
      "epoch": 1.3439139895046717,
      "grad_norm": 0.9876078963279724,
      "learning_rate": 2.7601433508255476e-05,
      "loss": 0.1213,
      "step": 10500
    },
    {
      "epoch": 1.4079098937667989,
      "grad_norm": 0.9371439218521118,
      "learning_rate": 2.6534835103886685e-05,
      "loss": 0.1232,
      "step": 11000
    },
    {
      "epoch": 1.471905798028926,
      "grad_norm": 0.942789614200592,
      "learning_rate": 2.54682366995179e-05,
      "loss": 0.1202,
      "step": 11500
    },
    {
      "epoch": 1.5359017022910533,
      "grad_norm": 1.451653003692627,
      "learning_rate": 2.4401638295149112e-05,
      "loss": 0.1181,
      "step": 12000
    },
    {
      "epoch": 1.5998976065531805,
      "grad_norm": 0.7980997562408447,
      "learning_rate": 2.3335039890780325e-05,
      "loss": 0.1206,
      "step": 12500
    },
    {
      "epoch": 1.6638935108153077,
      "grad_norm": 1.9100792407989502,
      "learning_rate": 2.2268441486411537e-05,
      "loss": 0.1186,
      "step": 13000
    },
    {
      "epoch": 1.727889415077435,
      "grad_norm": 1.6824227571487427,
      "learning_rate": 2.120184308204275e-05,
      "loss": 0.1207,
      "step": 13500
    },
    {
      "epoch": 1.7918853193395623,
      "grad_norm": 1.731228232383728,
      "learning_rate": 2.0135244677673965e-05,
      "loss": 0.1192,
      "step": 14000
    },
    {
      "epoch": 1.8558812236016895,
      "grad_norm": 1.039025902748108,
      "learning_rate": 1.9068646273305177e-05,
      "loss": 0.1155,
      "step": 14500
    },
    {
      "epoch": 1.9198771278638167,
      "grad_norm": 0.7578993439674377,
      "learning_rate": 1.800204786893639e-05,
      "loss": 0.1173,
      "step": 15000
    },
    {
      "epoch": 1.983873032125944,
      "grad_norm": 1.1431925296783447,
      "learning_rate": 1.69354494645676e-05,
      "loss": 0.1157,
      "step": 15500
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.12407750636339188,
      "eval_mse": 0.12407751065680599,
      "eval_runtime": 102.1252,
      "eval_samples_per_second": 1783.214,
      "eval_steps_per_second": 222.903,
      "step": 15626
    },
    {
      "epoch": 2.0478689363880713,
      "grad_norm": 1.0193284749984741,
      "learning_rate": 1.5868851060198814e-05,
      "loss": 0.1032,
      "step": 16000
    },
    {
      "epoch": 2.1118648406501985,
      "grad_norm": 0.8400812745094299,
      "learning_rate": 1.480225265583003e-05,
      "loss": 0.0983,
      "step": 16500
    },
    {
      "epoch": 2.1758607449123257,
      "grad_norm": 0.8080986738204956,
      "learning_rate": 1.3735654251461241e-05,
      "loss": 0.0991,
      "step": 17000
    },
    {
      "epoch": 2.239856649174453,
      "grad_norm": 1.2077598571777344,
      "learning_rate": 1.2669055847092454e-05,
      "loss": 0.0984,
      "step": 17500
    },
    {
      "epoch": 2.30385255343658,
      "grad_norm": 1.0903464555740356,
      "learning_rate": 1.1602457442723666e-05,
      "loss": 0.0966,
      "step": 18000
    },
    {
      "epoch": 2.3678484576987073,
      "grad_norm": 1.89614737033844,
      "learning_rate": 1.053585903835488e-05,
      "loss": 0.0997,
      "step": 18500
    },
    {
      "epoch": 2.4318443619608345,
      "grad_norm": 0.732280969619751,
      "learning_rate": 9.469260633986092e-06,
      "loss": 0.0969,
      "step": 19000
    },
    {
      "epoch": 2.4958402662229617,
      "grad_norm": 0.8635444641113281,
      "learning_rate": 8.402662229617304e-06,
      "loss": 0.0972,
      "step": 19500
    },
    {
      "epoch": 2.559836170485089,
      "grad_norm": 1.2498939037322998,
      "learning_rate": 7.336063825248518e-06,
      "loss": 0.0971,
      "step": 20000
    },
    {
      "epoch": 2.623832074747216,
      "grad_norm": 1.8945280313491821,
      "learning_rate": 6.26946542087973e-06,
      "loss": 0.0965,
      "step": 20500
    },
    {
      "epoch": 2.6878279790093433,
      "grad_norm": 0.8438006043434143,
      "learning_rate": 5.202867016510943e-06,
      "loss": 0.0964,
      "step": 21000
    },
    {
      "epoch": 2.7518238832714705,
      "grad_norm": 0.7478394508361816,
      "learning_rate": 4.1362686121421564e-06,
      "loss": 0.0987,
      "step": 21500
    },
    {
      "epoch": 2.8158197875335977,
      "grad_norm": 1.5676864385604858,
      "learning_rate": 3.069670207773369e-06,
      "loss": 0.097,
      "step": 22000
    },
    {
      "epoch": 2.879815691795725,
      "grad_norm": 1.0723648071289062,
      "learning_rate": 2.003071803404582e-06,
      "loss": 0.0984,
      "step": 22500
    },
    {
      "epoch": 2.943811596057852,
      "grad_norm": 0.886972963809967,
      "learning_rate": 9.364733990357951e-07,
      "loss": 0.096,
      "step": 23000
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.12343376874923706,
      "eval_mse": 0.12343375904401642,
      "eval_runtime": 98.083,
      "eval_samples_per_second": 1856.703,
      "eval_steps_per_second": 232.089,
      "step": 23439
    },
    {
      "epoch": 3.0,
      "step": 23439,
      "total_flos": 4.9403660544e+16,
      "train_loss": 0.12799415017126403,
      "train_runtime": 3304.0151,
      "train_samples_per_second": 907.986,
      "train_steps_per_second": 7.094
    }
  ],
  "logging_steps": 500,
  "max_steps": 23439,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.9403660544e+16,
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null
}