{
  "best_global_step": 3924,
  "best_metric": 3.390385389328003,
  "best_model_checkpoint": "sindhibert_session5/checkpoint-3924",
  "epoch": 2.0,
  "eval_steps": 1962,
  "global_step": 3924,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05098139179199592,
      "grad_norm": 5.245308876037598,
      "learning_rate": 2.1063829787234044e-06,
      "loss": 14.123677978515625,
      "step": 100
    },
    {
      "epoch": 0.10196278358399184,
      "grad_norm": 5.646098613739014,
      "learning_rate": 4.23404255319149e-06,
      "loss": 14.066470947265625,
      "step": 200
    },
    {
      "epoch": 0.15294417537598776,
      "grad_norm": 5.477426528930664,
      "learning_rate": 4.99628768445363e-06,
      "loss": 14.01585693359375,
      "step": 300
    },
    {
      "epoch": 0.20392556716798368,
      "grad_norm": 5.2961344718933105,
      "learning_rate": 4.975657001043815e-06,
      "loss": 13.965966796875,
      "step": 400
    },
    {
      "epoch": 0.2549069589599796,
      "grad_norm": 5.533503532409668,
      "learning_rate": 4.937082682936824e-06,
      "loss": 13.97388671875,
      "step": 500
    },
    {
      "epoch": 0.3058883507519755,
      "grad_norm": 5.314619064331055,
      "learning_rate": 4.8808443179239025e-06,
      "loss": 13.95988525390625,
      "step": 600
    },
    {
      "epoch": 0.35686974254397147,
      "grad_norm": 5.3324384689331055,
      "learning_rate": 4.807349523327375e-06,
      "loss": 13.953388671875,
      "step": 700
    },
    {
      "epoch": 0.40785113433596737,
      "grad_norm": 5.467737674713135,
      "learning_rate": 4.717130991578312e-06,
      "loss": 13.9411962890625,
      "step": 800
    },
    {
      "epoch": 0.45883252612796327,
      "grad_norm": 5.45510721206665,
      "learning_rate": 4.610842629246138e-06,
      "loss": 13.934332275390625,
      "step": 900
    },
    {
      "epoch": 0.5098139179199592,
      "grad_norm": 5.6341962814331055,
      "learning_rate": 4.489254817504595e-06,
      "loss": 13.926766357421876,
      "step": 1000
    },
    {
      "epoch": 0.5607953097119551,
      "grad_norm": 5.584266662597656,
      "learning_rate": 4.353248828386359e-06,
      "loss": 13.91165283203125,
      "step": 1100
    },
    {
      "epoch": 0.611776701503951,
      "grad_norm": 5.35775899887085,
      "learning_rate": 4.203810437297447e-06,
      "loss": 13.915504150390625,
      "step": 1200
    },
    {
      "epoch": 0.6627580932959469,
      "grad_norm": 5.549437999725342,
      "learning_rate": 4.042022778088111e-06,
      "loss": 13.945462646484375,
      "step": 1300
    },
    {
      "epoch": 0.7137394850879429,
      "grad_norm": 5.455720901489258,
      "learning_rate": 3.869058492466847e-06,
      "loss": 13.89906494140625,
      "step": 1400
    },
    {
      "epoch": 0.7647208768799388,
      "grad_norm": 5.856232166290283,
      "learning_rate": 3.6861712306588434e-06,
      "loss": 13.905211181640626,
      "step": 1500
    },
    {
      "epoch": 0.8157022686719347,
      "grad_norm": 5.481319904327393,
      "learning_rate": 3.4946865649123025e-06,
      "loss": 13.896219482421875,
      "step": 1600
    },
    {
      "epoch": 0.8666836604639306,
      "grad_norm": 5.278250217437744,
      "learning_rate": 3.2959923817118296e-06,
      "loss": 13.906019287109375,
      "step": 1700
    },
    {
      "epoch": 0.9176650522559265,
      "grad_norm": 5.768893718719482,
      "learning_rate": 3.091528822336405e-06,
      "loss": 13.93161376953125,
      "step": 1800
    },
    {
      "epoch": 0.9686464440479226,
      "grad_norm": 5.366046905517578,
      "learning_rate": 2.8827778446730764e-06,
      "loss": 13.906478271484374,
      "step": 1900
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.396049737930298,
      "eval_runtime": 7.952,
      "eval_samples_per_second": 637.827,
      "eval_steps_per_second": 10.06,
      "step": 1962
    },
    {
      "epoch": 1.0193729288809585,
      "grad_norm": 5.584351062774658,
      "learning_rate": 2.6712524819426355e-06,
      "loss": 13.823892822265625,
      "step": 2000
    },
    {
      "epoch": 1.0703543206729544,
      "grad_norm": 5.338160037994385,
      "learning_rate": 2.458485876190375e-06,
      "loss": 13.90267333984375,
      "step": 2100
    },
    {
      "epoch": 1.1213357124649503,
      "grad_norm": 5.378915309906006,
      "learning_rate": 2.246020166027488e-06,
      "loss": 13.9045458984375,
      "step": 2200
    },
    {
      "epoch": 1.1723171042569462,
      "grad_norm": 5.420534133911133,
      "learning_rate": 2.0353953091650912e-06,
      "loss": 13.91886474609375,
      "step": 2300
    },
    {
      "epoch": 1.2232984960489421,
      "grad_norm": 5.299575328826904,
      "learning_rate": 1.82813792075548e-06,
      "loss": 13.879603271484376,
      "step": 2400
    },
    {
      "epoch": 1.274279887840938,
      "grad_norm": 5.256579875946045,
      "learning_rate": 1.6257502084406458e-06,
      "loss": 13.92048828125,
      "step": 2500
    },
    {
      "epoch": 1.325261279632934,
      "grad_norm": 5.846710205078125,
      "learning_rate": 1.4296990843071351e-06,
      "loss": 13.912366943359375,
      "step": 2600
    },
    {
      "epoch": 1.3762426714249298,
      "grad_norm": 5.547558307647705,
      "learning_rate": 1.2414055326641378e-06,
      "loss": 13.929051513671874,
      "step": 2700
    },
    {
      "epoch": 1.4272240632169257,
      "grad_norm": 5.693734169006348,
      "learning_rate": 1.062234310707458e-06,
      "loss": 13.929571533203125,
      "step": 2800
    },
    {
      "epoch": 1.4782054550089216,
      "grad_norm": 5.414492607116699,
      "learning_rate": 8.934840567192496e-07,
      "loss": 13.91981201171875,
      "step": 2900
    },
    {
      "epoch": 1.5291868468009175,
      "grad_norm": 5.501009941101074,
      "learning_rate": 7.363778774995834e-07,
      "loss": 13.91615478515625,
      "step": 3000
    },
    {
      "epoch": 1.5801682385929134,
      "grad_norm": 5.355737686157227,
      "learning_rate": 5.920544832524166e-07,
      "loss": 13.9007421875,
      "step": 3100
    },
    {
      "epoch": 1.6311496303849093,
      "grad_norm": 5.376833915710449,
      "learning_rate": 4.61559934180571e-07,
      "loss": 13.915396728515624,
      "step": 3200
    },
    {
      "epoch": 1.6821310221769055,
      "grad_norm": 5.302165985107422,
      "learning_rate": 3.4584005861063974e-07,
      "loss": 13.94998046875,
      "step": 3300
    },
    {
      "epoch": 1.7331124139689014,
      "grad_norm": 5.571377277374268,
      "learning_rate": 2.4573359760148354e-07,
      "loss": 13.98015380859375,
      "step": 3400
    },
    {
      "epoch": 1.7840938057608973,
      "grad_norm": 5.618070125579834,
      "learning_rate": 1.619661257243843e-07,
      "loss": 13.96041015625,
      "step": 3500
    },
    {
      "epoch": 1.8350751975528932,
      "grad_norm": 5.216390132904053,
      "learning_rate": 9.514479207723276e-08,
      "loss": 13.9774560546875,
      "step": 3600
    },
    {
      "epoch": 1.886056589344889,
      "grad_norm": 5.458727836608887,
      "learning_rate": 4.575391965002174e-08,
      "loss": 14.00027587890625,
      "step": 3700
    },
    {
      "epoch": 1.9370379811368852,
      "grad_norm": 5.513509273529053,
      "learning_rate": 1.4151494937582056e-08,
      "loss": 14.0021728515625,
      "step": 3800
    },
    {
      "epoch": 1.988019372928881,
      "grad_norm": 5.200069904327393,
      "learning_rate": 5.665732429496751e-10,
      "loss": 13.960343017578126,
      "step": 3900
    },
    {
      "epoch": 2.0,
      "eval_loss": 3.390385389328003,
      "eval_runtime": 7.9843,
      "eval_samples_per_second": 635.249,
      "eval_steps_per_second": 10.02,
      "step": 3924
    }
  ],
  "logging_steps": 100,
  "max_steps": 3924,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1962,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 3,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.643322074019246e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}