{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 266,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018832391713747645,
      "grad_norm": 1.1089932918548584,
      "learning_rate": 1.791044776119403e-06,
      "loss": 1.365,
      "step": 5
    },
    {
      "epoch": 0.03766478342749529,
      "grad_norm": 0.8269509673118591,
      "learning_rate": 4.029850746268657e-06,
      "loss": 1.3307,
      "step": 10
    },
    {
      "epoch": 0.05649717514124294,
      "grad_norm": 0.6606281995773315,
      "learning_rate": 6.268656716417911e-06,
      "loss": 1.2869,
      "step": 15
    },
    {
      "epoch": 0.07532956685499058,
      "grad_norm": 0.5472633242607117,
      "learning_rate": 8.507462686567164e-06,
      "loss": 1.2774,
      "step": 20
    },
    {
      "epoch": 0.09416195856873823,
      "grad_norm": 0.6873501539230347,
      "learning_rate": 1.0746268656716418e-05,
      "loss": 1.2985,
      "step": 25
    },
    {
      "epoch": 0.11299435028248588,
      "grad_norm": 0.5600975751876831,
      "learning_rate": 1.2985074626865672e-05,
      "loss": 1.217,
      "step": 30
    },
    {
      "epoch": 0.1318267419962335,
      "grad_norm": 0.7250403761863708,
      "learning_rate": 1.5223880597014927e-05,
      "loss": 1.233,
      "step": 35
    },
    {
      "epoch": 0.15065913370998116,
      "grad_norm": 0.5497453808784485,
      "learning_rate": 1.746268656716418e-05,
      "loss": 1.2169,
      "step": 40
    },
    {
      "epoch": 0.1694915254237288,
      "grad_norm": 0.6821231842041016,
      "learning_rate": 1.9701492537313435e-05,
      "loss": 1.1701,
      "step": 45
    },
    {
      "epoch": 0.18832391713747645,
      "grad_norm": 0.5772640705108643,
      "learning_rate": 2.194029850746269e-05,
      "loss": 1.1552,
      "step": 50
    },
    {
      "epoch": 0.2071563088512241,
      "grad_norm": 0.5170062780380249,
      "learning_rate": 2.417910447761194e-05,
      "loss": 1.1947,
      "step": 55
    },
    {
      "epoch": 0.22598870056497175,
      "grad_norm": 0.5546678304672241,
      "learning_rate": 2.6417910447761193e-05,
      "loss": 1.0869,
      "step": 60
    },
    {
      "epoch": 0.2448210922787194,
      "grad_norm": 0.6221190690994263,
      "learning_rate": 2.8656716417910447e-05,
      "loss": 1.1683,
      "step": 65
    },
    {
      "epoch": 0.263653483992467,
      "grad_norm": 0.7900596857070923,
      "learning_rate": 2.9999814384857414e-05,
      "loss": 1.1623,
      "step": 70
    },
    {
      "epoch": 0.2824858757062147,
      "grad_norm": 0.5770227909088135,
      "learning_rate": 2.99977262672591e-05,
      "loss": 1.1033,
      "step": 75
    },
    {
      "epoch": 0.3013182674199623,
      "grad_norm": 0.5991471409797668,
      "learning_rate": 2.9993318337195934e-05,
      "loss": 1.1221,
      "step": 80
    },
    {
      "epoch": 0.32015065913371,
      "grad_norm": 0.6063827872276306,
      "learning_rate": 2.998659127647601e-05,
      "loss": 1.0186,
      "step": 85
    },
    {
      "epoch": 0.3389830508474576,
      "grad_norm": 0.690599262714386,
      "learning_rate": 2.9977546125625006e-05,
      "loss": 1.0455,
      "step": 90
    },
    {
      "epoch": 0.3578154425612053,
      "grad_norm": 0.681769073009491,
      "learning_rate": 2.9966184283725243e-05,
      "loss": 1.014,
      "step": 95
    },
    {
      "epoch": 0.3766478342749529,
      "grad_norm": 0.6798896789550781,
      "learning_rate": 2.9952507508199284e-05,
      "loss": 0.9816,
      "step": 100
    },
    {
      "epoch": 0.3954802259887006,
      "grad_norm": 0.6588433384895325,
      "learning_rate": 2.9936517914538085e-05,
      "loss": 1.0095,
      "step": 105
    },
    {
      "epoch": 0.4143126177024482,
      "grad_norm": 0.7236828207969666,
      "learning_rate": 2.991821797597379e-05,
      "loss": 0.9913,
      "step": 110
    },
    {
      "epoch": 0.4331450094161959,
      "grad_norm": 0.6340996026992798,
      "learning_rate": 2.989761052309717e-05,
      "loss": 0.9752,
      "step": 115
    },
    {
      "epoch": 0.4519774011299435,
      "grad_norm": 0.7232135534286499,
      "learning_rate": 2.98746987434198e-05,
      "loss": 0.965,
      "step": 120
    },
    {
      "epoch": 0.4708097928436911,
      "grad_norm": 0.7878739237785339,
      "learning_rate": 2.9849486180881017e-05,
      "loss": 0.9779,
      "step": 125
    },
    {
      "epoch": 0.4896421845574388,
      "grad_norm": 0.7223129868507385,
      "learning_rate": 2.982197673529976e-05,
      "loss": 0.9159,
      "step": 130
    },
    {
      "epoch": 0.5084745762711864,
      "grad_norm": 0.7257573008537292,
      "learning_rate": 2.9792174661771332e-05,
      "loss": 0.9403,
      "step": 135
    },
    {
      "epoch": 0.527306967984934,
      "grad_norm": 0.8936464190483093,
      "learning_rate": 2.976008457000927e-05,
      "loss": 0.8969,
      "step": 140
    },
    {
      "epoch": 0.5461393596986818,
      "grad_norm": 0.9090555906295776,
      "learning_rate": 2.972571142363229e-05,
      "loss": 0.8963,
      "step": 145
    },
    {
      "epoch": 0.5649717514124294,
      "grad_norm": 0.8844392895698547,
      "learning_rate": 2.9689060539396545e-05,
      "loss": 0.8657,
      "step": 150
    },
    {
      "epoch": 0.583804143126177,
      "grad_norm": 0.8749438524246216,
      "learning_rate": 2.9650137586373236e-05,
      "loss": 0.8104,
      "step": 155
    },
    {
      "epoch": 0.6026365348399246,
      "grad_norm": 0.9088373184204102,
      "learning_rate": 2.9608948585071747e-05,
      "loss": 0.842,
      "step": 160
    },
    {
      "epoch": 0.6214689265536724,
      "grad_norm": 0.8506441116333008,
      "learning_rate": 2.9565499906508376e-05,
      "loss": 0.8132,
      "step": 165
    },
    {
      "epoch": 0.64030131826742,
      "grad_norm": 0.9221051335334778,
      "learning_rate": 2.9519798271220915e-05,
      "loss": 0.7725,
      "step": 170
    },
    {
      "epoch": 0.6591337099811676,
      "grad_norm": 1.0185768604278564,
      "learning_rate": 2.9471850748229117e-05,
      "loss": 0.7874,
      "step": 175
    },
    {
      "epoch": 0.6779661016949152,
      "grad_norm": 0.8905505537986755,
      "learning_rate": 2.9421664753941277e-05,
      "loss": 0.7491,
      "step": 180
    },
    {
      "epoch": 0.696798493408663,
      "grad_norm": 0.9472977519035339,
      "learning_rate": 2.936924805100708e-05,
      "loss": 0.7542,
      "step": 185
    },
    {
      "epoch": 0.7156308851224106,
      "grad_norm": 1.000342845916748,
      "learning_rate": 2.9314608747116894e-05,
      "loss": 0.7364,
      "step": 190
    },
    {
      "epoch": 0.7344632768361582,
      "grad_norm": 0.8593543171882629,
      "learning_rate": 2.9257755293747702e-05,
      "loss": 0.7712,
      "step": 195
    },
    {
      "epoch": 0.7532956685499058,
      "grad_norm": 0.9145693778991699,
      "learning_rate": 2.919869648485582e-05,
      "loss": 0.7399,
      "step": 200
    },
    {
      "epoch": 0.7721280602636534,
      "grad_norm": 0.9073010683059692,
      "learning_rate": 2.9137441455516706e-05,
      "loss": 0.6409,
      "step": 205
    },
    {
      "epoch": 0.7909604519774012,
      "grad_norm": 0.8353155851364136,
      "learning_rate": 2.9073999680511935e-05,
      "loss": 0.738,
      "step": 210
    },
    {
      "epoch": 0.8097928436911488,
      "grad_norm": 1.0048359632492065,
      "learning_rate": 2.9008380972863693e-05,
      "loss": 0.7345,
      "step": 215
    },
    {
      "epoch": 0.8286252354048964,
      "grad_norm": 1.1321660280227661,
      "learning_rate": 2.8940595482316883e-05,
      "loss": 0.6374,
      "step": 220
    },
    {
      "epoch": 0.847457627118644,
      "grad_norm": 0.9749424457550049,
      "learning_rate": 2.8870653693769233e-05,
      "loss": 0.6937,
      "step": 225
    },
    {
      "epoch": 0.8662900188323918,
      "grad_norm": 0.9839215874671936,
      "learning_rate": 2.8798566425649478e-05,
      "loss": 0.6885,
      "step": 230
    },
    {
      "epoch": 0.8851224105461394,
      "grad_norm": 1.0036780834197998,
      "learning_rate": 2.8724344828244014e-05,
      "loss": 0.6463,
      "step": 235
    },
    {
      "epoch": 0.903954802259887,
      "grad_norm": 0.9374542832374573,
      "learning_rate": 2.864800038197219e-05,
      "loss": 0.6853,
      "step": 240
    },
    {
      "epoch": 0.9227871939736346,
      "grad_norm": 1.1694684028625488,
      "learning_rate": 2.8569544895610536e-05,
      "loss": 0.6399,
      "step": 245
    },
    {
      "epoch": 0.9416195856873822,
      "grad_norm": 1.0283533334732056,
      "learning_rate": 2.848899050446624e-05,
      "loss": 0.6486,
      "step": 250
    },
    {
      "epoch": 0.96045197740113,
      "grad_norm": 0.9280611276626587,
      "learning_rate": 2.840634966850006e-05,
      "loss": 0.6377,
      "step": 255
    },
    {
      "epoch": 0.9792843691148776,
      "grad_norm": 0.8847799897193909,
      "learning_rate": 2.832163517039903e-05,
      "loss": 0.6585,
      "step": 260
    },
    {
      "epoch": 0.9981167608286252,
      "grad_norm": 1.08964204788208,
      "learning_rate": 2.8234860113599316e-05,
      "loss": 0.6149,
      "step": 265
    }
  ],
  "logging_steps": 5,
  "max_steps": 1330,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.639319352668324e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}