| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9997597886139803, |
| "eval_steps": 500, |
| "global_step": 2081, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.024021138601969732, |
| "grad_norm": 0.2971479594707489, |
| "learning_rate": 2e-05, |
| "loss": 2.1978, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.048042277203939464, |
| "grad_norm": 0.16026291251182556, |
| "learning_rate": 1.9507631708517972e-05, |
| "loss": 1.7812, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.0720634158059092, |
| "grad_norm": 0.13299140334129333, |
| "learning_rate": 1.9015263417035946e-05, |
| "loss": 1.6164, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.09608455440787893, |
| "grad_norm": 0.13369913399219513, |
| "learning_rate": 1.8522895125553916e-05, |
| "loss": 1.6454, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.12010569300984866, |
| "grad_norm": 0.13563945889472961, |
| "learning_rate": 1.8030526834071887e-05, |
| "loss": 1.6126, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.1441268316118184, |
| "grad_norm": 0.14658796787261963, |
| "learning_rate": 1.7538158542589857e-05, |
| "loss": 1.6201, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.16814797021378813, |
| "grad_norm": 0.241877481341362, |
| "learning_rate": 1.704579025110783e-05, |
| "loss": 1.6169, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.19216910881575786, |
| "grad_norm": 0.1964091956615448, |
| "learning_rate": 1.6553421959625802e-05, |
| "loss": 1.587, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.2161902474177276, |
| "grad_norm": 0.21258428692817688, |
| "learning_rate": 1.6061053668143772e-05, |
| "loss": 1.5908, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.24021138601969733, |
| "grad_norm": 0.17729483544826508, |
| "learning_rate": 1.5568685376661746e-05, |
| "loss": 1.5925, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.2642325246216671, |
| "grad_norm": 0.19052956998348236, |
| "learning_rate": 1.5076317085179715e-05, |
| "loss": 1.6462, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.2882536632236368, |
| "grad_norm": 0.20297902822494507, |
| "learning_rate": 1.4583948793697687e-05, |
| "loss": 1.6039, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.3122748018256065, |
| "grad_norm": 0.268765389919281, |
| "learning_rate": 1.4091580502215658e-05, |
| "loss": 1.5687, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.33629594042757627, |
| "grad_norm": 0.16921205818653107, |
| "learning_rate": 1.359921221073363e-05, |
| "loss": 1.576, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.360317079029546, |
| "grad_norm": 0.2648881673812866, |
| "learning_rate": 1.3106843919251602e-05, |
| "loss": 1.6265, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.3843382176315157, |
| "grad_norm": 0.17966234683990479, |
| "learning_rate": 1.2614475627769574e-05, |
| "loss": 1.626, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.40835935623348546, |
| "grad_norm": 0.33203789591789246, |
| "learning_rate": 1.2122107336287543e-05, |
| "loss": 1.5763, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.4323804948354552, |
| "grad_norm": 0.24411220848560333, |
| "learning_rate": 1.1629739044805515e-05, |
| "loss": 1.5917, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.45640163343742496, |
| "grad_norm": 0.23752576112747192, |
| "learning_rate": 1.1137370753323486e-05, |
| "loss": 1.5612, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.48042277203939465, |
| "grad_norm": 0.2846772372722626, |
| "learning_rate": 1.0645002461841458e-05, |
| "loss": 1.5507, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.5044439106413644, |
| "grad_norm": 0.24864733219146729, |
| "learning_rate": 1.015263417035943e-05, |
| "loss": 1.5805, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.5284650492433342, |
| "grad_norm": 0.24564430117607117, |
| "learning_rate": 9.6602658788774e-06, |
| "loss": 1.5815, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.5524861878453039, |
| "grad_norm": 0.21580535173416138, |
| "learning_rate": 9.167897587395373e-06, |
| "loss": 1.5688, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.5765073264472736, |
| "grad_norm": 0.2774423062801361, |
| "learning_rate": 8.675529295913345e-06, |
| "loss": 1.5762, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.6005284650492433, |
| "grad_norm": 0.238305926322937, |
| "learning_rate": 8.183161004431315e-06, |
| "loss": 1.5438, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.624549603651213, |
| "grad_norm": 0.2373654991388321, |
| "learning_rate": 7.690792712949286e-06, |
| "loss": 1.5666, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.6485707422531828, |
| "grad_norm": 0.2837679386138916, |
| "learning_rate": 7.198424421467258e-06, |
| "loss": 1.5976, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.6725918808551525, |
| "grad_norm": 0.2412709891796112, |
| "learning_rate": 6.70605612998523e-06, |
| "loss": 1.5558, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.6966130194571223, |
| "grad_norm": 0.23326246440410614, |
| "learning_rate": 6.213687838503201e-06, |
| "loss": 1.5499, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.720634158059092, |
| "grad_norm": 0.19991439580917358, |
| "learning_rate": 5.721319547021172e-06, |
| "loss": 1.5891, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.7446552966610618, |
| "grad_norm": 0.2734942138195038, |
| "learning_rate": 5.228951255539144e-06, |
| "loss": 1.6094, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.7686764352630314, |
| "grad_norm": 0.22683735191822052, |
| "learning_rate": 4.7365829640571156e-06, |
| "loss": 1.5835, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.7926975738650012, |
| "grad_norm": 0.2708589434623718, |
| "learning_rate": 4.244214672575087e-06, |
| "loss": 1.5581, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.8167187124669709, |
| "grad_norm": 0.20286928117275238, |
| "learning_rate": 3.751846381093058e-06, |
| "loss": 1.5882, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.8407398510689407, |
| "grad_norm": 0.2391301989555359, |
| "learning_rate": 3.2594780896110296e-06, |
| "loss": 1.6212, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.8647609896709104, |
| "grad_norm": 0.3111497461795807, |
| "learning_rate": 2.767109798129001e-06, |
| "loss": 1.5544, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.8887821282728802, |
| "grad_norm": 0.18287725746631622, |
| "learning_rate": 2.2747415066469722e-06, |
| "loss": 1.5776, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.9128032668748499, |
| "grad_norm": 0.2419891357421875, |
| "learning_rate": 1.7823732151649436e-06, |
| "loss": 1.5666, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.9368244054768196, |
| "grad_norm": 0.21688586473464966, |
| "learning_rate": 1.290004923682915e-06, |
| "loss": 1.5855, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.9608455440787893, |
| "grad_norm": 0.28701093792915344, |
| "learning_rate": 7.976366322008864e-07, |
| "loss": 1.5673, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.984866682680759, |
| "grad_norm": 0.24174277484416962, |
| "learning_rate": 3.0526834071885774e-07, |
| "loss": 1.553, |
| "step": 2050 |
| } |
| ], |
| "logging_steps": 50, |
| "max_steps": 2081, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 5.4349814738681856e+17, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |